WebRTC - 4. Flutter WebRTC
First of all, three packages are used (a rough dependency sketch follows the list):
1. flutter_webrtc : a package built to make WebRTC convenient to use
2. socket_io_client : socket connection
3. vibration : for an incoming-call kind of effect(?)
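For reference, the dependency entries in pubspec.yaml might look roughly like this (a sketch only; the version constraints are my assumption, not taken from the post):
dependencies:
  flutter:
    sdk: flutter
  flutter_webrtc: ^0.9.0    # peer connections and video renderers
  socket_io_client: ^2.0.0  # signaling over socket.io
  vibration: ^1.7.0         # vibrate on an incoming call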
You might wonder where the state management is here. In this example it is implemented with the built-in setState and ValueListenableBuilder (a tiny generic sketch follows).
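As a quick generic illustration of that pattern (not code from this project): a ValueNotifier holds a value, and a ValueListenableBuilder rebuilds its subtree whenever that value changes.
import 'package:flutter/material.dart';

// Generic ValueNotifier + ValueListenableBuilder sketch (illustration only)
final ValueNotifier<int> counter = ValueNotifier<int>(0);

Widget counterText() {
  return ValueListenableBuilder<int>(
    valueListenable: counter,
    // Rebuilds whenever counter.value changes, e.g. after counter.value++
    builder: (_, value, __) => Text('count: $value'),
  );
}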
main.dart
import 'package:flutter/material.dart';
import 'package:web_rtc/view/webrtc_main_view.dart';
void main() {
runApp(const MaterialApp(home: WebRTCMainView()));
}
models
// iceCandidate_model
class IceCandidateModel {
String? candidate;
String? sdpMid;
int? sdpMLineIndex;
String? to;
IceCandidateModel({
this.candidate,
this.sdpMid,
this.sdpMLineIndex,
this.to,
});
factory IceCandidateModel.fromJson(Map json) {
return IceCandidateModel(
candidate: json['candidate'],
sdpMid: json['sdpMid'],
sdpMLineIndex: json['sdpMLineIndex'],
to: json['to'],
);
}
Map<String, dynamic> toJson() {
return {
'candidate': candidate,
'sdpMid': sdpMid,
'sdpMLineIndex': sdpMLineIndex,
'to': to
};
}
}
// webrtc_model
class WebRTCModel {
String? from;
String? to;
String? offerSDP;
String? offerType;
String? answerSDP;
String? answerType;
bool? audioOnly;
WebRTCModel({
this.from,
this.to,
this.offerSDP,
this.offerType,
this.answerSDP,
this.answerType,
this.audioOnly,
});
factory WebRTCModel.fromJson(Map json) {
return WebRTCModel(
from: json['from'],
to: json['to'],
offerSDP: json['offerSDP'],
offerType: json['offerType'],
answerSDP: json['answerSDP'],
answerType: json['answerType'],
audioOnly: json['audioOnly']);
}
Map<String, dynamic> toJson() {
return {
'from': from,
'to': to,
'offerSDP': offerSDP,
'offerType': offerType,
'answerSDP': answerSDP,
'answerType': answerType,
'audioOnly': audioOnly
};
}
}
I created and used models similar to the ones used in React.
webrtc_socket
import 'dart:async';
import 'package:socket_io_client/socket_io_client.dart' as io;
import 'package:flutter/material.dart';
class WebRTCSocket {
late io.Socket _socket;
String? user;
Future<String?> connectSocket() {
final Completer<String> completer = Completer<String>();
_socket = io.io('your socket',
io.OptionBuilder().setTransports(['websocket']).build());
_socket.onConnect((data) {
user = _socket.id;
completer.complete(user);
debugPrint('[socket] connected : $user');
});
return completer.future;
}
void socketOn(String event, void Function(dynamic) callback) {
_socket.on(event, callback);
}
void socketEmit(String event, dynamic data) {
_socket.emit(event, data);
}
void disconnectSocket() {
_socket.dispose();
}
}
I structured this simply to remove duplicated code.
A Completer is used so that the async call completes once the socket connection is established (a short usage sketch follows).
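For example, a caller can simply await connectSocket; as an added safeguard (my addition, not in the original), a timeout keeps the Future from hanging forever if the server is unreachable:
import 'dart:async';

import 'package:flutter/material.dart';

// Usage sketch for the WebRTCSocket class above; the timeout is an assumption, not in the original code
Future<void> connectExample() async {
  final WebRTCSocket socket = WebRTCSocket();
  try {
    // The Completer completes inside onConnect, so this await finishes once connected
    final String? myId =
        await socket.connectSocket().timeout(const Duration(seconds: 5));
    debugPrint('[socket] connected as $myId');
  } on TimeoutException {
    debugPrint('[socket] connect timed out');
  }
}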
web_rtc_controller
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:vibration/vibration.dart';
import 'package:web_rtc/model/icecandidate_model.dart';
import 'package:web_rtc/model/webrtc_model.dart';
import 'package:web_rtc/service/webrtc_socket.dart';
enum ScreenState { loading, initDone, receivedCalling }
class WebRTCController extends WebRTCSocket {
/// The other party
String? to;
/// Me (my socket id)
String? _from;
/// Peer connection between me and the other party
RTCPeerConnection? _peer;
/// My video renderer
RTCVideoRenderer? localRenderer = RTCVideoRenderer();
/// The other party's video renderer
RTCVideoRenderer? remoteRenderer = RTCVideoRenderer();
/// For handling the user list
ValueNotifier<List<String>> userListNotifier =
ValueNotifier<List<String>>([]);
/// State for whether my video is rendering
ValueNotifier<bool> localVideoNotifier = ValueNotifier<bool>(false);
/// State for whether the other party's video is rendering
ValueNotifier<bool> remoteVideoNotifier = ValueNotifier<bool>(false);
/// Holds the answer created in [_receiveOffer] until [sendAnswer] uses it
RTCSessionDescription? _answer;
/// Screen state handling for [WebRTCMainView]
ValueNotifier<ScreenState> screenNotifier =
ValueNotifier<ScreenState>(ScreenState.loading);
/// [WebRTCView] context, used for Navigator.pop
BuildContext? webRTCVideoViewContext;
/// Sent after the offer/answer exchange is complete
final List<IceCandidateModel> _candidateList = [];
/// My local media stream
MediaStream? _localStream;
/// Whether the iceCandidate connection has been made
bool _isConnected = false;
bool audioOnly = false;
/// Initializes the socket, peer, and renderers via [_initSocket] and [_initPeer]
Future<void> initHandler() async {
await _initSocket();
await _initPeer();
await localRenderer!.initialize();
await remoteRenderer!.initialize();
screenNotifier.value = ScreenState.initDone;
}
/// Releases the notifiers, renderers, stream, peer, and socket
void dispose() {
userListNotifier.dispose();
localVideoNotifier.dispose();
remoteVideoNotifier.dispose();
screenNotifier.dispose();
localRenderer?.dispose();
remoteRenderer?.dispose();
_localStream?.dispose();
_peer?.dispose();
super.disconnectSocket();
}
/// Socket initialization
Future<void> _initSocket() async {
_from = await super.connectSocket();
if (_from != null) {
super.socketOn('updateUserlist', _updateUserList);
super.socketOn('connect_error', (data) {
debugPrint('[socket] error : $data');
});
super.socketOn('connect_timeout', (data) {
debugPrint('[socket] error : $data');
});
super.socketOn('offer', _receiveOffer);
super.socketOn('refuse', _refusedConnection);
super.socketOn('answer', _receiveAnswer);
super.socketOn('remoteIceCandidate', _remotePeerIceCandidate);
super.socketOn('disconnectPeer', close);
}
}
/// [_peer] initialization
Future<void> _initPeer() async {
_peer = await createPeerConnection({
'iceServers': [
{'url': 'stun:stun.l.google.com:19302'},
],
});
_peer!.onIceCandidate = _iceCandidateEvent;
_peer!.onTrack = _remoteStream;
_peer!.onConnectionState = _peerStateChange;
}
/// [socket] Updated whenever a user logs in or out
void _updateUserList(data) {
debugPrint('[socket] userList update $data');
Map<String, dynamic> map = Map.castFrom(data);
List<String> list = List.from(map['userList']);
debugPrint('[socket] list : $list');
list.removeWhere((element) => element == super.user);
userListNotifier.value = list;
}
/// [me] Sends the video call offer
Future<void> sendOffer() async {
if (to == null) {
return;
}
await turnOnMedia();
final RTCSessionDescription offer = await _peer!.createOffer({
'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': !audioOnly,
}
});
await _peer!.setLocalDescription(offer);
WebRTCModel model = WebRTCModel();
model.offerType = offer.type;
model.offerSDP = offer.sdp;
model.to = to;
model.from = _from;
model.audioOnly = audioOnly;
debugPrint('[webRTC] send offer : ${model.from} to ${model.to}');
super.socketEmit('offer', model.toJson());
}
/// [other party] Receives the video call offer
void _receiveOffer(data) async {
WebRTCModel model = WebRTCModel.fromJson(data);
audioOnly = model.audioOnly!;
debugPrint('[webRTC] receive offer : ${model.to} from ${model.from}');
await _peer!.setRemoteDescription(
RTCSessionDescription(model.offerSDP, model.offerType));
await turnOnMedia();
_answer = await _peer!.createAnswer({
'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': !audioOnly,
}
});
await _peer!.setLocalDescription(_answer!);
to = model.from;
screenNotifier.value = ScreenState.receivedCalling;
if (await Vibration.hasVibrator() ?? false) {
Vibration.vibrate(duration: 1500);
}
}
/// [other party] Sends a call rejection
Future<void> refuseOffer() async {
socketEmit('refuse', {'to': to});
await _resetElements();
screenNotifier.value = ScreenState.initDone;
}
/// [me] Receives the call rejection
void _refusedConnection(_) async {
await close(_);
}
/// [other party] Answer to the video call offer
void sendAnswer() {
debugPrint('[webRTC] send answer to $to');
WebRTCModel model = WebRTCModel();
model.answerSDP = _answer!.sdp;
model.answerType = _answer!.type;
model.to = to;
model.audioOnly = audioOnly;
_answer = null;
Vibration.hasVibrator().then((value) {
if (value ?? false) {
Vibration.cancel();
}
});
super.socketEmit('answer', model.toJson());
}
/// [me] Receives the other party's answer
void _receiveAnswer(data) async {
WebRTCModel model = WebRTCModel.fromJson(data);
debugPrint('[webRTC] receive answer : ${model.answerType}');
await _peer!.setRemoteDescription(RTCSessionDescription(
model.answerSDP!.replaceFirst('useinbandfec=1',
'useinbandfec=1; stereo=1; maxaveragebitrate=510000'),
model.answerType));
for (IceCandidateModel candidateModel in _candidateList) {
if (!_isConnected) {
debugPrint('[webRTC] send iceCandidate : ${candidateModel.toJson()}');
super.socketEmit('iceCandidate', candidateModel.toJson());
break;
}
}
}
/// [me, other party] Requests the ice candidate connection
void _iceCandidateEvent(RTCIceCandidate e) {
debugPrint('[webRTC] iceCandidate event : ${e.candidate}');
IceCandidateModel model = IceCandidateModel();
model.candidate = e.candidate;
model.sdpMid = e.sdpMid;
model.sdpMLineIndex = e.sdpMLineIndex;
model.to = to;
if (model.candidate == null || model.to == null) {
debugPrint('[webRTC] iceCandidate cut candidate : ${model.toJson()}');
return;
}
int index = _candidateList
.indexWhere((element) => element.candidate == model.candidate);
if (index < 0) {
_candidateList.add(model);
}
}
/// [me, other party] Handles the ice candidate connection
void _remotePeerIceCandidate(data) async {
debugPrint('[webRTC] remoteIceCandidate $data');
try {
IceCandidateModel model = IceCandidateModel.fromJson(data);
RTCIceCandidate candidate =
RTCIceCandidate(model.candidate, model.sdpMid, model.sdpMLineIndex);
await _peer!.addCandidate(candidate);
} catch (e) {
debugPrint('[webRTC] remoteIceCandidate error : $e');
}
}
/// Handles the other party's media
void _remoteStream(RTCTrackEvent e) {
debugPrint('[webRTC] gotRemoteStream data : ${e.track}, ${e.streams}');
MediaStream stream = e.streams.first;
remoteRenderer!.srcObject = stream;
remoteVideoNotifier.value = true;
}
/// For checking the peer state
void _peerStateChange(RTCPeerConnectionState state) {
debugPrint(
'[webRTC] peer connection state : ${state.name}, ${_peer?.connectionState}');
if (state == RTCPeerConnectionState.RTCPeerConnectionStateConnected &&
!_isConnected) {
_isConnected = true;
} else if (state == RTCPeerConnectionState.RTCPeerConnectionStateFailed) {
_peer?.restartIce();
}
}
/// [me] Turns media on
Future<void> turnOnMedia() async {
try {
_localStream = await navigator.mediaDevices.getUserMedia({
'video': audioOnly ? false : {'facingMode': 'user'},
'audio': {
'autoGainControl': false,
'channelCount': 2,
'echoCancellation': false,
'latency': 0,
'noiseSuppression': false,
'sampleRate': 48000,
'sampleSize': 16,
'volume': 1.0
}
});
localRenderer!.srcObject = _localStream;
localVideoNotifier.value = true;
localRenderer?.muted = true;
for (MediaStreamTrack track in _localStream!.getTracks()) {
debugPrint('track : $track, stream : $_localStream');
if (track.kind == 'audio') {
Helper.setMicrophoneMute(false, track);
}
_peer!.addTrack(track, _localStream!);
}
if (_peer!.connectionState ==
RTCPeerConnectionState.RTCPeerConnectionStateConnected) {
List<RTCRtpSender> list = await _peer!.getSenders();
debugPrint('[media] list : ${list.length}');
for (RTCRtpSender sender in list) {
debugPrint('[media] sender : $sender');
List<MediaStreamTrack> trackList = _localStream!.getTracks();
debugPrint('[media] trackList : ${trackList.length}');
int index = trackList
.indexWhere((element) => element.kind == sender.track?.kind);
debugPrint('[media] index : $index');
if (index >= 0) {
MediaStreamTrack track = trackList[index];
debugPrint('[media] track : $track');
await sender.replaceTrack(track);
debugPrint('[media] replace track');
}
}
}
} catch (e) {
debugPrint('[webRTC] media error : $e');
}
}
/// [me] Turns media off
Future<void> turnOffMedia() async {
if (localRenderer!.srcObject != null) {
localRenderer!.srcObject = null;
localVideoNotifier.value = false;
for (MediaStreamTrack track in _localStream!.getTracks()) {
track.enabled = false;
await Future.delayed(const Duration(milliseconds: 300));
await track.stop();
}
await _localStream?.dispose();
_localStream = null;
}
}
/// Ends the connection
Future<void> close(_) async {
debugPrint('[webRTC] close peer : $_peer');
if (webRTCVideoViewContext == null) {
return;
}
super.socketEmit('disconnectPeer', {'to': to});
// ignore: use_build_context_synchronously
Navigator.pop(webRTCVideoViewContext!);
webRTCVideoViewContext = null;
await _resetElements();
}
/// Resets [_peer], [localRenderer], and [remoteRenderer]
Future<void> _resetElements() async {
await turnOffMedia();
_candidateList.clear();
_peer?.close();
_peer = null;
await _initPeer();
await localRenderer?.dispose();
await remoteRenderer?.dispose();
localRenderer = null;
remoteRenderer = null;
localRenderer = RTCVideoRenderer();
remoteRenderer = RTCVideoRenderer();
await localRenderer!.initialize();
await remoteRenderer!.initialize();
localVideoNotifier.value = false;
remoteVideoNotifier.value = false;
_isConnected = false;
await _localStream?.dispose();
_localStream = null;
}
}
It is implemented almost the same as the React version.
So I would like to skip the detailed explanation, but since some of you may only be reading the Flutter part, here is a brief rundown:
1. Connect the socket and register handlers one by one for the events created earlier on the node server = initSocket
2. Initialize the peer connection and subscribe to the events that will be used = initPeer
3. When the socket connects, load the user list = updateUserList
4. The caller selects a user and sends an offer over the socket = sendOffer
4.1. To send the offer, the caller turns on their own media = turnOnMedia
5. The callee receives the offer and creates an answer = receiveOffer
5.1 To accept the offer, the callee turns on their media = turnOnMedia
5.2 On rejection, the previously changed peer and socket state is reset = refuseOffer
6. The callee sends the answer created in step 5 to the caller over the socket = sendAnswer
7. The caller receives the answer and sends the previously gathered iceCandidate to the callee = receiveAnswer
8. The callee receives the iceCandidate and attempts the iceCandidate connection = remotePeerIceCandidate
In addition,
turnOffMedia : turns the media off
iceCandidateEvent : fires while the peer tries to connect and discovers multiple candidate routes; since some of them cannot connect, they are stored in a list and tried one at a time
remoteStream : streams video and audio once the peer-to-peer connection is established
close : resets the socket, the peer, fields, etc.
And to update the view on these events, the values are changed through ValueNotifiers.
Expressed a bit more neatly, the flow looks like this (a condensed code sketch follows the list):
1. [common] video connection
2. [caller] peer createOffer
3. [caller] peer setLocalDescription
4. [caller] send offer
5. [callee] receive offer
6. [callee] peer setRemoteDescription
7. [callee] peer createAnswer
8. [callee] peer setLocalDescription
9. [callee] send answer
10. [caller] receive answer
11. [caller] send iceCandidate
12. [callee] receive iceCandidate
13. [callee] peer addCandidate
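To tie those steps to the actual flutter_webrtc calls, here is a heavily stripped-down caller-side sketch (my condensation of the controller above; it assumes the same socket event names as the node signaling server from the earlier post, and omits the models, media handling, and error handling):
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:socket_io_client/socket_io_client.dart' as io;

// Condensed signaling sketch; the real logic lives in WebRTCController above
Future<void> callerFlow(io.Socket socket, String to) async {
  final RTCPeerConnection peer = await createPeerConnection({
    'iceServers': [
      {'url': 'stun:stun.l.google.com:19302'},
    ],
  });

  // Candidates gathered here are only sent after the answer arrives (step 11)
  final List<RTCIceCandidate> candidates = [];
  peer.onIceCandidate = (RTCIceCandidate c) => candidates.add(c);

  // Steps 2-4: createOffer -> setLocalDescription -> send the offer over the socket
  final RTCSessionDescription offer = await peer.createOffer();
  await peer.setLocalDescription(offer);
  socket.emit('offer', {'offerSDP': offer.sdp, 'offerType': offer.type, 'to': to});

  // Steps 10-11: receive the answer -> setRemoteDescription -> send a gathered iceCandidate
  socket.on('answer', (data) async {
    await peer.setRemoteDescription(
        RTCSessionDescription(data['answerSDP'], data['answerType']));
    if (candidates.isNotEmpty) {
      final RTCIceCandidate c = candidates.first;
      socket.emit('iceCandidate', {
        'candidate': c.candidate,
        'sdpMid': c.sdpMid,
        'sdpMLineIndex': c.sdpMLineIndex,
        'to': to,
      });
    }
  });

  // Steps 12-13: the relayed candidate comes back as 'remoteIceCandidate' and is added to the peer
  socket.on('remoteIceCandidate', (data) async {
    await peer.addCandidate(RTCIceCandidate(
        data['candidate'], data['sdpMid'], data['sdpMLineIndex']));
  });
}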
webrtc_main_view
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:web_rtc/service/webrtc_controller.dart';
import 'package:web_rtc/view/webrtc_view.dart';
class WebRTCMainView extends StatefulWidget {
const WebRTCMainView({Key? key}) : super(key: key);
@override
State<WebRTCMainView> createState() => _WebRTCMainViewState();
}
class _WebRTCMainViewState extends State<WebRTCMainView> {
final WebRTCController _controller = WebRTCController();
@override
void initState() {
super.initState();
_controller.initHandler();
}
@override
void dispose() {
_controller.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return ValueListenableBuilder<ScreenState>(
valueListenable: _controller.screenNotifier,
builder: (_, screenState, __) {
late Widget body;
switch (screenState) {
case ScreenState.loading:
body = const Center(
child: Text('Loading...'),
);
break;
case ScreenState.initDone:
body = _initDone();
break;
case ScreenState.receivedCalling:
body = _receivedCalling();
break;
}
return Scaffold(
appBar: screenState == ScreenState.initDone
? AppBar(
title: const Text('Online User list'),
)
: null,
body: body,
floatingActionButton: screenState == ScreenState.initDone
? FloatingActionButton(
child: const Icon(Icons.call),
onPressed: () async {
await _controller.sendOffer();
_moveToVideoView();
},
)
: null,
);
},
);
}
Widget _initDone() {
return SafeArea(
child: ValueListenableBuilder<List<String>>(
valueListenable: _controller.userListNotifier,
builder: (_, list, __) {
return ListView.builder(
itemCount: list.length,
itemBuilder: (_, index) {
String userId = list[index];
return ListTile(
leading: Text('${index + 1}'),
title: Text(
userId,
style: TextStyle(
color: _controller.to == userId ? Colors.red : null,
),
),
onTap: () {
setState(() {
_controller.to = userId;
});
},
);
},
);
},
),
);
}
Widget _receivedCalling() {
return Stack(
fit: StackFit.expand,
children: [
ValueListenableBuilder<bool>(
valueListenable: _controller.localVideoNotifier,
builder: (_, value, __) {
return value
? RTCVideoView(
_controller.localRenderer!,
objectFit: RTCVideoViewObjectFit.RTCVideoViewObjectFitCover,
)
: const Center(child: Icon(Icons.person_off));
},
),
Align(
alignment: Alignment.bottomCenter,
child: Padding(
padding: const EdgeInsets.all(30.0),
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
InkWell(
onTap: () {
_controller.sendAnswer();
_moveToVideoView();
},
child: const CircleAvatar(
backgroundColor: Colors.green,
foregroundColor: Colors.white,
child: Icon(Icons.call),
),
),
InkWell(
onTap: () async {
await _controller.refuseOffer();
},
child: const CircleAvatar(
backgroundColor: Colors.red,
foregroundColor: Colors.white,
child: Icon(Icons.close),
),
),
],
),
),
)
],
);
}
void _moveToVideoView() {
// ignore: use_build_context_synchronously
Navigator.push(
context,
MaterialPageRoute(
builder: (_) => WebRTCView(
controller: _controller,
),
),
).whenComplete(() {
_controller.screenNotifier.value = ScreenState.initDone;
});
}
}
This view handles a total of three screens: loading, socket connected, and call received.
After that, tapping the call button moves to the next view.
webrtc_view
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:web_rtc/service/webrtc_controller.dart';
class WebRTCView extends StatefulWidget {
const WebRTCView({Key? key, this.controller}) : super(key: key);
final WebRTCController? controller;
@override
State<WebRTCView> createState() => _WebRTCViewState();
}
class _WebRTCViewState extends State<WebRTCView> {
late final WebRTCController _controller;
final ValueNotifier<bool> _btnNotifier = ValueNotifier<bool>(false);
@override
void initState() {
super.initState();
_controller = widget.controller!;
}
@override
void dispose() {
_btnNotifier.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
_controller.webRTCVideoViewContext = context;
return WillPopScope(
onWillPop: () async {
return false;
},
child: SafeArea(
child: Scaffold(
body: GestureDetector(
onTap: () {
_btnNotifier.value = !_btnNotifier.value;
},
child: Stack(
children: [
_videoWidget(
_controller.remoteVideoNotifier, _controller.remoteRenderer!),
Align(
alignment: Alignment.topLeft,
child: Container(
margin: const EdgeInsets.only(top: 20, left: 20),
height: 160,
width: 120,
child: _videoWidget(
_controller.localVideoNotifier, _controller.localRenderer!),
),
),
_btnWidget()
],
),
),
),
),
);
}
Widget _btnWidget() {
return Align(
alignment: Alignment.bottomCenter,
child: ValueListenableBuilder<bool>(
valueListenable: _btnNotifier,
builder: (_, visible, __) => visible
? Padding(
padding: const EdgeInsets.only(bottom: 30),
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceAround,
children: [
InkWell(
onTap: () async {
if (_controller.localVideoNotifier.value) {
await _controller.turnOffMedia();
} else {
await _controller.turnOnMedia();
}
},
child: ValueListenableBuilder<bool>(
valueListenable: _controller.localVideoNotifier,
builder: (_, camOn, __) {
return camOn
? const CircleAvatar(
backgroundColor: Colors.yellow,
foregroundColor: Colors.white,
child: Icon(Icons.videocam_off),
)
: const CircleAvatar(
backgroundColor: Colors.green,
foregroundColor: Colors.white,
child: Icon(Icons.videocam),
);
},
),
),
InkWell(
onTap: () async {
await _controller.close(null);
},
child: const CircleAvatar(
backgroundColor: Colors.red,
foregroundColor: Colors.white,
child: Icon(Icons.close),
),
),
],
),
)
: const SizedBox(),
),
);
}
Widget _videoWidget(ValueNotifier<bool> listener, RTCVideoRenderer renderer) {
return ValueListenableBuilder<bool>(
valueListenable: listener,
builder: (_, value, __) {
return value
? RTCVideoView(
renderer,
objectFit: RTCVideoViewObjectFit.RTCVideoViewObjectFitCover,
)
: const Center(
child: Icon(Icons.person_off),
);
},
);
}
}
Finally, this is the video call part itself.
I think the example can wrap up here.
Intro : https://devmemory.tistory.com/103
Node : https://devmemory.tistory.com/104
React : https://devmemory.tistory.com/105
Outro : https://devmemory.tistory.com/107