// join_channel_audio.dart
  1. import 'dart:async';
  2. import 'dart:developer';
  3. import 'package:agora_rtc_engine/rtc_engine.dart';
  4. import 'package:agora_rtc_engine_example/config/agora.config.dart' as config;
  5. import 'package:flutter/cupertino.dart';
  6. import 'package:flutter/foundation.dart';
  7. import 'package:flutter/material.dart';
  8. import 'package:permission_handler/permission_handler.dart';
  9. /// JoinChannelAudio Example
  10. class JoinChannelAudio extends StatefulWidget {
  11. @override
  12. State<StatefulWidget> createState() => _State();
  13. }
  14. class _State extends State<JoinChannelAudio> {
  15. late final RtcEngine _engine;
  16. String channelId = config.channelId;
  17. bool isJoined = false,
  18. openMicrophone = true,
  19. enableSpeakerphone = true,
  20. playEffect = false;
  21. bool _enableInEarMonitoring = false;
  22. double _recordingVolume = 0, _playbackVolume = 0, _inEarMonitoringVolume = 0;
  23. TextEditingController? _controller;
  24. @override
  25. void initState() {
  26. super.initState();
  27. _controller = TextEditingController(text: channelId);
  28. this._initEngine();
  29. }
  30. @override
  31. void dispose() {
  32. super.dispose();
  33. _engine.destroy();
  34. }
  35. _initEngine() async {
  36. _engine = await RtcEngine.createWithContext(RtcEngineContext(config.appId));
  37. this._addListeners();
  38. await _engine.enableAudio();
  39. await _engine.setChannelProfile(ChannelProfile.LiveBroadcasting);
  40. await _engine.setClientRole(ClientRole.Broadcaster);
  41. }
  42. _addListeners() {
  43. _engine.setEventHandler(RtcEngineEventHandler(
  44. joinChannelSuccess: (channel, uid, elapsed) {
  45. log('joinChannelSuccess ${channel} ${uid} ${elapsed}');
  46. setState(() {
  47. isJoined = true;
  48. });
  49. },
  50. leaveChannel: (stats) async {
  51. log('leaveChannel ${stats.toJson()}');
  52. setState(() {
  53. isJoined = false;
  54. });
  55. },
  56. ));
  57. }
  58. _joinChannel() async {
  59. if (defaultTargetPlatform == TargetPlatform.android) {
  60. await Permission.microphone.request();
  61. }
  62. await _engine
  63. .joinChannel(config.token, config.channelId, null, config.uid)
  64. .catchError((onError) {
  65. print('error ${onError.toString()}');
  66. });
  67. }
  68. _leaveChannel() async {
  69. await _engine.leaveChannel();
  70. }
  71. _switchMicrophone() {
  72. _engine.enableLocalAudio(!openMicrophone).then((value) {
  73. setState(() {
  74. openMicrophone = !openMicrophone;
  75. });
  76. }).catchError((err) {
  77. log('enableLocalAudio $err');
  78. });
  79. }
  80. _switchSpeakerphone() {
  81. _engine.setEnableSpeakerphone(!enableSpeakerphone).then((value) {
  82. setState(() {
  83. enableSpeakerphone = !enableSpeakerphone;
  84. });
  85. }).catchError((err) {
  86. log('setEnableSpeakerphone $err');
  87. });
  88. }
  89. _switchEffect() async {
  90. if (playEffect) {
  91. _engine.stopEffect(1).then((value) {
  92. setState(() {
  93. playEffect = false;
  94. });
  95. }).catchError((err) {
  96. log('stopEffect $err');
  97. });
  98. } else {
  99. _engine
  100. .playEffect(
  101. 1,
  102. await (_engine.getAssetAbsolutePath("assets/Sound_Horizon.mp3")
  103. as FutureOr<String>),
  104. -1,
  105. 1,
  106. 1,
  107. 100,
  108. true)
  109. .then((value) {
  110. setState(() {
  111. playEffect = true;
  112. });
  113. }).catchError((err) {
  114. log('playEffect $err');
  115. });
  116. }
  117. }
  118. _onChangeInEarMonitoringVolume(double value) {
  119. setState(() {
  120. _inEarMonitoringVolume = value;
  121. });
  122. _engine.setInEarMonitoringVolume(value.toInt());
  123. }
  124. _toggleInEarMonitoring(value) {
  125. setState(() {
  126. _enableInEarMonitoring = value;
  127. });
  128. _engine.enableInEarMonitoring(value);
  129. }
  130. @override
  131. Widget build(BuildContext context) {
  132. return Stack(
  133. children: [
  134. Column(
  135. children: [
  136. TextField(
  137. controller: _controller,
  138. decoration: InputDecoration(hintText: 'Channel ID'),
  139. onChanged: (text) {
  140. setState(() {
  141. channelId = text;
  142. });
  143. },
  144. ),
  145. Row(
  146. children: [
  147. Expanded(
  148. flex: 1,
  149. child: ElevatedButton(
  150. onPressed:
  151. isJoined ? this._leaveChannel : this._joinChannel,
  152. child: Text('${isJoined ? 'Leave' : 'Join'} channel'),
  153. ),
  154. )
  155. ],
  156. ),
  157. ],
  158. ),
  159. Align(
  160. alignment: Alignment.bottomRight,
  161. child: Padding(
  162. child: Column(
  163. mainAxisSize: MainAxisSize.min,
  164. crossAxisAlignment: CrossAxisAlignment.end,
  165. children: [
  166. ElevatedButton(
  167. onPressed: this._switchMicrophone,
  168. child: Text('Microphone ${openMicrophone ? 'on' : 'off'}'),
  169. ),
  170. ElevatedButton(
  171. onPressed: this._switchSpeakerphone,
  172. child:
  173. Text(enableSpeakerphone ? 'Speakerphone' : 'Earpiece'),
  174. ),
  175. ElevatedButton(
  176. onPressed: this._switchEffect,
  177. child: Text('${playEffect ? 'Stop' : 'Play'} effect'),
  178. ),
  179. Row(
  180. mainAxisAlignment: MainAxisAlignment.end,
  181. children: [
  182. Text('RecordingVolume:'),
  183. Slider(
  184. value: _recordingVolume,
  185. min: 0,
  186. max: 400,
  187. divisions: 5,
  188. label: 'RecordingVolume',
  189. onChanged: (double value) {
  190. setState(() {
  191. _recordingVolume = value;
  192. });
  193. _engine.adjustRecordingSignalVolume(value.toInt());
  194. },
  195. )
  196. ],
  197. ),
  198. Row(
  199. mainAxisAlignment: MainAxisAlignment.end,
  200. children: [
  201. Text('PlaybackVolume:'),
  202. Slider(
  203. value: _playbackVolume,
  204. min: 0,
  205. max: 400,
  206. divisions: 5,
  207. label: 'PlaybackVolume',
  208. onChanged: (double value) {
  209. setState(() {
  210. _playbackVolume = value;
  211. });
  212. _engine.adjustPlaybackSignalVolume(value.toInt());
  213. },
  214. )
  215. ],
  216. ),
  217. Column(
  218. mainAxisSize: MainAxisSize.min,
  219. crossAxisAlignment: CrossAxisAlignment.end,
  220. children: [
  221. Row(mainAxisSize: MainAxisSize.min, children: [
  222. Text('InEar Monitoring Volume:'),
  223. Switch(
  224. value: _enableInEarMonitoring,
  225. onChanged: _toggleInEarMonitoring,
  226. activeTrackColor: Colors.grey[350],
  227. activeColor: Colors.white,
  228. )
  229. ]),
  230. if (_enableInEarMonitoring)
  231. Container(
  232. width: 300,
  233. child: Slider(
  234. value: _inEarMonitoringVolume,
  235. min: 0,
  236. max: 100,
  237. divisions: 5,
  238. label: 'InEar Monitoring Volume',
  239. onChanged: _onChangeInEarMonitoringVolume,
  240. ))
  241. ],
  242. ),
  243. ],
  244. ),
  245. padding: EdgeInsets.symmetric(vertical: 20, horizontal: 0),
  246. ))
  247. ],
  248. );
  249. }
  250. }