liuyuqi-dellpc committed 2 years ago
commit b3c0588925

+ 9 - 0
android/app/build.gradle

@@ -22,11 +22,16 @@ if (flutterVersionName == null) {
 }
 
 apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
 apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
 
 android {
     compileSdkVersion 31
 
+    sourceSets {
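+        // Include the Kotlin sources (e.g. MainActivity.kt) in the main source set.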
+        main.java.srcDirs += 'src/main/kotlin'
+    }
+
     compileOptions {
         sourceCompatibility JavaVersion.VERSION_1_8
         targetCompatibility JavaVersion.VERSION_1_8
@@ -53,3 +58,7 @@ android {
 flutter {
     source '../..'
 }
+
+dependencies {
+    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+}

+ 357 - 0
android/app/src/main/kotlin/me/yoqi/flutter/flutter_audio_record/MainActivity.kt

@@ -0,0 +1,357 @@
+package com.example.flutter_audio_record
+
+import android.Manifest
+import android.content.pm.PackageManager
+import android.media.AudioFormat
+import android.media.AudioRecord
+import android.media.MediaRecorder
+import android.os.Build
+import android.util.Log
+import androidx.annotation.NonNull
+import androidx.core.app.ActivityCompat
+import androidx.core.content.ContextCompat
+import io.flutter.embedding.android.FlutterActivity
+import io.flutter.embedding.engine.FlutterEngine
+import io.flutter.plugin.common.MethodCall
+import io.flutter.plugin.common.MethodChannel
+import io.flutter.plugin.common.MethodChannel.Result
+import java.io.*
+import java.nio.ByteBuffer
+import java.nio.ByteOrder
+import java.util.*
+
+class MainActivity: FlutterActivity() {
+    private val LOG_NAME = "AndroidAudioRecorder"
+    private val PERMISSIONS_REQUEST_RECORD_AUDIO = 200
+    private val RECORDER_BPP: Byte = 16 // we use 16bit
+
+    private var mSampleRate = 16000 // 16Khz
+
+    private var mRecorder: AudioRecord? = null
+    private var mFilePath: String? = null
+    private var mExtension: String? = null
+    private var bufferSize = 1024
+    private var mFileOutputStream: FileOutputStream? = null
+    private var mStatus = "unset"
+    private var mPeakPower = -120.0
+    private var mAveragePower = -120.0
+    private var mRecordingThread: Thread? = null
+    private var mDataSize: Long = 0
+    private var _result: Result? = null
+    private val CHANNEL = "flutter_audio_recorder2"
+
+    override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) {
+        super.configureFlutterEngine(flutterEngine)
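+        // A single MethodChannel carries all recorder calls; the Result is cached in _result so
+        // the runtime-permission callback can answer "hasPermissions" asynchronously.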
+        MethodChannel(flutterEngine.dartExecutor.binaryMessenger, CHANNEL).setMethodCallHandler {
+            call, result ->
+
+            _result = result
+
+            when (call.method) {
+                "hasPermissions" -> handleHasPermission()
+                "init" -> handleInit(call, result)
+                "current" -> handleCurrent(call, result)
+                "start" -> handleStart(call, result)
+                "pause" -> handlePause(call, result)
+                "resume" -> handleResume(call, result)
+                "stop" -> handleStop(call, result)
+                else -> result.notImplemented()
+            }
+        }
+    }
+    override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) {
+        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
+        when (requestCode) {
+            PERMISSIONS_REQUEST_RECORD_AUDIO -> {
+                var granted = true
+                Log.d(LOG_NAME, "parsing result")
+                for (result in grantResults) {
+                    if (result != PackageManager.PERMISSION_GRANTED) {
+                        Log.d(LOG_NAME, "result $result")
+                        granted = false
+                    }
+                }
+                Log.d(LOG_NAME, "onRequestPermissionsResult - $granted")
+                _result?.success(granted)
+            }
+            else -> Log.d(LOG_NAME, "onRequestPermissionsResult - unhandled requestCode $requestCode")
+        }
+    }
+
+    private fun hasRecordPermission(): Boolean {
+        // From Marshmallow (API 23) onwards, dangerous permissions must be checked at runtime.
+        return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+            ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
+                    && ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED
+        } else {
+            ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
+        }
+    }
+
+    private fun handleHasPermission() {
+        if (hasRecordPermission()) {
+            Log.d(LOG_NAME, "handleHasPermission true")
+            _result?.success(true)
+        } else {
+            Log.d(LOG_NAME, "handleHasPermission false")
+            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                ActivityCompat.requestPermissions(this, arrayOf(Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE), PERMISSIONS_REQUEST_RECORD_AUDIO)
+            } else {
+                ActivityCompat.requestPermissions(this, arrayOf(Manifest.permission.RECORD_AUDIO), PERMISSIONS_REQUEST_RECORD_AUDIO)
+            }
+        }
+    }
+
+    private fun handleInit(call: MethodCall, result: Result) {
+        resetRecorder()
+        mSampleRate = Integer.parseInt(call.argument("sampleRate"))
+        mFilePath = call.argument("path")
+        mExtension = call.argument("extension")
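+        // Minimum internal buffer (in bytes) AudioRecord requires for mono 16-bit PCM at this sample rate.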
+        bufferSize = AudioRecord.getMinBufferSize(mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
+        mStatus = "initialized"
+        val initResult: HashMap<String, Any> = HashMap()
+        initResult.put("duration", "0")
+        mFilePath?.let { initResult.put("path", it) }
+        mExtension?.let { initResult.put("audioFormat", it) }
+        initResult.put("peakPower", mPeakPower)
+        initResult.put("averagePower", mAveragePower)
+        initResult.put("isMeteringEnabled", true)
+        initResult.put("status", mStatus)
+        result.success(initResult)
+    }
+
+    private fun handleCurrent(call: MethodCall, result: Result) {
+        val currentResult: HashMap<String, Any> = HashMap()
+        currentResult.put("duration", getDuration() * 1000)
+        val path = if (mStatus == "stopped") mFilePath else getTempFilename()
+        path?.let { currentResult.put("path", it) }
+        mExtension?.let { currentResult.put("audioFormat", it) }
+        currentResult.put("peakPower", mPeakPower)
+        currentResult.put("averagePower", mAveragePower)
+        currentResult.put("isMeteringEnabled", true)
+        currentResult.put("status", mStatus)
+        // Log.d(LOG_NAME, currentResult.toString());
+        result.success(currentResult)
+    }
+
+    private fun handleStart(call: MethodCall, result: Result) {
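+        // Create the AudioRecord, open the temporary raw-PCM file and start the background read loop.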
+        mRecorder = AudioRecord(MediaRecorder.AudioSource.MIC, mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize)
+        try {
+            mFileOutputStream = FileOutputStream(getTempFilename())
+        } catch (e: FileNotFoundException) {
+            result.error("", "cannot find the file", null)
+            return
+        }
+        mRecorder!!.startRecording()
+        mStatus = "recording"
+        startThread()
+        result.success(null)
+    }
+
+    private fun startThread() {
+        mRecordingThread = Thread(Runnable {
+            processAudioStream()
+        },"Audio Processing Thread")
+
+        mRecordingThread!!.start()
+    }
+
+    private fun handlePause(call: MethodCall, result: Result) {
+        mStatus = "paused"
+        mPeakPower = -120.0
+        mAveragePower = -120.0
+        mRecorder?.stop()
+        mRecordingThread = null
+        result.success(null)
+    }
+
+    private fun handleResume(call: MethodCall, result: Result) {
+        mStatus = "recording"
+        mRecorder?.startRecording()
+        startThread()
+        result.success(null)
+    }
+
+    private fun handleStop(call: MethodCall, result: Result) {
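+        // Stop the read loop, release the recorder, close the temp stream, then wrap the raw PCM
+        // in a WAV container at the requested path and report the final recording state to Dart.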
+        if (mStatus == "stopped") {
+            result.success(null)
+        } else {
+            mStatus = "stopped"
+
+            // Return Recording Object
+            val currentResult: HashMap<String, Any> = HashMap()
+            currentResult.put("duration", getDuration() * 1000)
+            mFilePath?.let { currentResult.put("path", it) }
+            mExtension?.let { currentResult.put("audioFormat", it) }
+            currentResult.put("peakPower", mPeakPower)
+            currentResult.put("averagePower", mAveragePower)
+            currentResult.put("isMeteringEnabled", true)
+            currentResult.put("status", mStatus)
+            resetRecorder()
+            mRecordingThread = null
+            mRecorder?.stop()
+            mRecorder?.release()
+            try {
+                mFileOutputStream?.close()
+            } catch (e: IOException) {
+                e.printStackTrace()
+            }
+            Log.d(LOG_NAME, "before adding the wav header")
+            copyWaveFile(getTempFilename(), mFilePath!!)
+            deleteTempFile()
+
+            // Log.d(LOG_NAME, currentResult.toString());
+            result.success(currentResult)
+        }
+    }
+
+    private fun processAudioStream() {
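+        // Runs on the recording thread: keep draining PCM from AudioRecord while the status is
+        // "recording", update the metering values and append the raw bytes to the temp file.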
+        Log.d(LOG_NAME, "processing the stream: $mStatus")
+        val size = bufferSize
+        val bData = ByteArray(size)
+        while (mStatus == "recording") {
+            Log.d(LOG_NAME, "reading audio data")
+            mRecorder?.read(bData, 0, bData.size)
+            mDataSize += bData.size
+            updatePowers(bData)
+            try {
+                mFileOutputStream?.write(bData)
+            } catch (e: IOException) {
+                e.printStackTrace()
+            }
+        }
+    }
+
+    private fun deleteTempFile() {
+        val file = File(getTempFilename())
+        if (file.exists()) {
+            file.delete()
+        }
+    }
+
+    private fun getTempFilename(): String {
+        return "$mFilePath.temp"
+    }
+
+    private fun copyWaveFile(inFilename: String, outFilename: String) {
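+        // Prepend a 44-byte WAV header to the raw PCM captured in the temp file and stream the
+        // samples into the final output file.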
+        var `in`: FileInputStream? = null
+        var out: FileOutputStream? = null
+        var totalAudioLen: Long = 0
+        var totalDataLen = totalAudioLen + 36
+        val longSampleRate = mSampleRate.toLong()
+        val channels = 1
+        val byteRate = (RECORDER_BPP * mSampleRate * channels / 8).toLong()
+        val data = ByteArray(bufferSize)
+        try {
+            `in` = FileInputStream(inFilename)
+            out = FileOutputStream(outFilename)
+            totalAudioLen = `in`.getChannel().size()
+            totalDataLen = totalAudioLen + 36
+            writeWaveFileHeader(out, totalAudioLen, totalDataLen,
+                    longSampleRate, channels, byteRate)
+            while (`in`.read(data) != -1) {
+                out.write(data)
+            }
+            `in`.close()
+            out.close()
+        } catch (e: FileNotFoundException) {
+            e.printStackTrace()
+        } catch (e: IOException) {
+            e.printStackTrace()
+        }
+    }
+
+    @Throws(IOException::class)
+    private fun writeWaveFileHeader(out: FileOutputStream?, totalAudioLen: Long,
+                                    totalDataLen: Long, longSampleRate: Long, channels: Int, byteRate: Long) {
+        val header = ByteArray(44)
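+        // Canonical 44-byte RIFF/WAVE header: "RIFF" + chunk size, "WAVE", the "fmt " sub-chunk
+        // (PCM, channel count, sample rate, byte rate, block align, bits per sample) and finally
+        // the "data" sub-chunk with the PCM length. All multi-byte fields are little-endian.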
+        header[0] = 'R'.toByte() // RIFF/WAVE header
+        header[1] = 'I'.toByte()
+        header[2] = 'F'.toByte()
+        header[3] = 'F'.toByte()
+        header[4] = (totalDataLen and 0xff).toByte()
+        header[5] = (totalDataLen shr 8 and 0xff).toByte()
+        header[6] = (totalDataLen shr 16 and 0xff).toByte()
+        header[7] = (totalDataLen shr 24 and 0xff).toByte()
+        header[8] = 'W'.toByte()
+        header[9] = 'A'.toByte()
+        header[10] = 'V'.toByte()
+        header[11] = 'E'.toByte()
+        header[12] = 'f'.toByte() // 'fmt ' chunk
+        header[13] = 'm'.toByte()
+        header[14] = 't'.toByte()
+        header[15] = ' '.toByte()
+        header[16] = 16 // 4 bytes: size of 'fmt ' chunk
+        header[17] = 0
+        header[18] = 0
+        header[19] = 0
+        header[20] = 1 // format = 1
+        header[21] = 0
+        header[22] = channels.toByte()
+        header[23] = 0
+        header[24] = (longSampleRate and 0xff).toByte()
+        header[25] = (longSampleRate shr 8 and 0xff).toByte()
+        header[26] = (longSampleRate shr 16 and 0xff).toByte()
+        header[27] = (longSampleRate shr 24 and 0xff).toByte()
+        header[28] = (byteRate and 0xff).toByte()
+        header[29] = (byteRate shr 8 and 0xff).toByte()
+        header[30] = (byteRate shr 16 and 0xff).toByte()
+        header[31] = (byteRate shr 24 and 0xff).toByte()
+        header[32] = (channels * RECORDER_BPP / 8).toByte() // block align = channels * bits per sample / 8
+        header[33] = 0
+        header[34] = RECORDER_BPP // bits per sample
+        header[35] = 0
+        header[36] = 'd'.toByte()
+        header[37] = 'a'.toByte()
+        header[38] = 't'.toByte()
+        header[39] = 'a'.toByte()
+        header[40] = (totalAudioLen and 0xff).toByte()
+        header[41] = (totalAudioLen shr 8 and 0xff).toByte()
+        header[42] = (totalAudioLen shr 16 and 0xff).toByte()
+        header[43] = (totalAudioLen shr 24 and 0xff).toByte()
+        if (out != null) {
+            out.write(header, 0, 44)
+        }
+    }
+
+    private fun byte2short(bData: ByteArray): ShortArray {
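+        // Reinterpret the little-endian PCM byte buffer as 16-bit samples for metering.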
+        val out = ShortArray(bData.size / 2)
+        ByteBuffer.wrap(bData).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(out)
+        return out
+    }
+
+    private fun resetRecorder() {
+        mPeakPower = -120.0
+        mAveragePower = -120.0
+        mDataSize = 0
+    }
+
+    private fun updatePowers(bdata: ByteArray) {
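+        // Approximate iOS-style peak/average power (in dB) from the last sample of the buffer.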
+        val data = byte2short(bdata)
+        val sampleVal = data[data.size - 1]
+        val escapeStatusList = listOf("paused", "stopped", "initialized", "unset")
+        mAveragePower = if (sampleVal.toInt() == 0 || escapeStatusList.contains(mStatus)) {
+            -120.0 // to match iOS silent case
+        } else {
+            // iOS factor : to match iOS power level
+            val iOSFactor = 0.25
+            20 * Math.log(Math.abs(sampleVal.toInt()) / 32768.0) * iOSFactor
+        }
+        mPeakPower = mAveragePower
+        // Log.d(LOG_NAME, "Peak: " + mPeakPower + " average: "+ mAveragePower);
+    }
+
+    private fun getDuration(): Int {
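+        // Duration in seconds = recorded bytes / (sample rate * 2 bytes per sample * 1 channel).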
+        val duration = mDataSize / (mSampleRate * 2 * 1)
+        return duration.toInt()
+    }
+}

+ 3 - 0
android/build.gradle

@@ -1,4 +1,6 @@
 buildscript {
+    ext.kotlin_version = '1.3.50'
+
     repositories {
         google()
         mavenCentral()
@@ -6,6 +8,7 @@ buildscript {
 
     dependencies {
         classpath 'com.android.tools.build:gradle:4.1.0'
+        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
     }
 }
 

+ 1 - 1
lib/pages/home_page.dart

@@ -74,7 +74,7 @@ class _HomePageState extends State<HomePage> {
           height: 200,
           color: Colors.white70,
           child: RecorderView(
-            save: _onFinish,
+            saveVoice: _onFinish,
           ),
         );
       },

+ 13 - 8
lib/plugins/audio_recorder.dart

@@ -7,8 +7,8 @@ import 'package:path/path.dart' as p;
 
 /// Audio Recorder Plugin
 class FlutterAudioRecorder {
-
-  static const MethodChannel _channel = MethodChannel('flutter_audio_recorder2');
+  static const MethodChannel _channel =
+      MethodChannel('flutter_audio_recorder2');
   static const String DEFAULT_EXTENSION = '.m4a';
   static LocalFileSystem fs = LocalFileSystem();
 
@@ -18,16 +18,21 @@ class FlutterAudioRecorder {
   String? _sampleRate;
 
   Future? _initRecorder;
+
   Future? get initialized => _initRecorder;
+
   Recording? get recording => _recording;
+
   /// Constructor.
   /// [path] is the audio file path.
-  FlutterAudioRecorder(String path, {AudioFormat? audioFormat, String sampleRate = "16000"}) {
+  FlutterAudioRecorder(String path,
+      {AudioFormat? audioFormat, String sampleRate = "16000"}) {
     _initRecorder = _init(path, audioFormat, sampleRate);
   }
 
   /// Initialize the FlutterAudioRecorder object
-  Future _init(String? path, AudioFormat? audioFormat, String sampleRate) async {
+  Future _init(
+      String? path, AudioFormat? audioFormat, String sampleRate) async {
     String extension;
     String extensionInPath;
     if (path != null) {
@@ -57,7 +62,8 @@ class FlutterAudioRecorder {
       if (await file.exists()) {
         throw Exception("A file already exists at the path :" + path);
       } else if (!await file.parent.exists()) {
-        throw Exception("The specified parent directory does not exist ${file.parent}");
+        throw Exception(
+            "The specified parent directory does not exist ${file.parent}");
       }
     } else {
       extension = DEFAULT_EXTENSION; // default value
@@ -143,8 +149,7 @@ class FlutterAudioRecorder {
   void _responseToRecording(Map<String, Object>? response) {
     if (response == null) return;
 
-    _recording!.duration =
-    Duration(milliseconds: response['duration'] as int);
+    _recording!.duration = Duration(milliseconds: response['duration'] as int);
     _recording!.path = response['path'] as String?;
     _recording!.audioFormat =
         _stringToAudioFormat(response['audioFormat'] as String?);
@@ -250,7 +255,7 @@ class AudioMetering {
   AudioMetering({this.peakPower, this.averagePower, this.isMeteringEnabled});
 }
 
-/// Represent the status of a Recording
+/// Custom recording status
 enum RecordingStatus {
   /// Recording not initialized
   Unset,

+ 15 - 7
lib/views/recorder.dart

@@ -9,9 +9,9 @@ import 'package:permission_handler/permission_handler.dart';
 
 /// Recording modal sheet
 class RecorderView extends StatefulWidget {
-  final Function save;
+  final Function saveVoice;
 
-  const RecorderView({Key? key, required this.save}) : super(key: key);
+  const RecorderView({Key? key, required this.saveVoice}) : super(key: key);
 
   @override
   _RecorderViewState createState() => _RecorderViewState();
@@ -20,7 +20,11 @@ class RecorderView extends StatefulWidget {
 class _RecorderViewState extends State<RecorderView> {
   IconData _recordIcon = Icons.mic_none;
   MaterialColor colo = Colors.orange;
+
+  /// Current recording status
   RecordingStatus _currentStatus = RecordingStatus.Unset;
+
+  /// Whether recording has stopped
   bool stop = false;
   Recording? _current;
 
@@ -46,9 +50,12 @@ class _RecorderViewState extends State<RecorderView> {
     ].request();
     //bool hasPermission = await FlutterAudioRecorder.hasPermissions ?? false;
     if (statuses[Permission.microphone] == PermissionStatus.granted) {
+      // Mark the recorder status as initialized
       _currentStatus = RecordingStatus.Initialized;
       _recordIcon = Icons.mic;
-    } else {}
+    } else {
+      print("权限为获取");
+    }
   }
 
   @override
@@ -83,8 +90,8 @@ class _RecorderViewState extends State<RecorderView> {
                         )),
                         textStyle: MaterialStateProperty.all(
                             const TextStyle(color: Colors.orange))),
-                    onPressed: () async {
-                      await _onRecordButtonPressed();
+                    onPressed: () {
+                      _onRecordButtonPressed();
                       setState(() {});
                     },
                     child: Column(
@@ -168,7 +175,7 @@ class _RecorderViewState extends State<RecorderView> {
     );
   }
 
-  Future<void> _onRecordButtonPressed() async {
+  _onRecordButtonPressed() async {
     switch (_currentStatus) {
       case RecordingStatus.Initialized:
         {
@@ -191,6 +198,7 @@ class _RecorderViewState extends State<RecorderView> {
           break;
         }
       default:
+        print("---------weizb");
         break;
     }
   }
@@ -256,7 +264,7 @@ class _RecorderViewState extends State<RecorderView> {
   _stop() async {
     var result = await audioRecorder!.stop();
     Fluttertoast.showToast(msg: "Stop Recording , File Saved");
-    widget.save();
+    widget.saveVoice();
     setState(() {
       _current = result!;
       _currentStatus = _current!.status!;