Browse Source

Merge branch 'main' of https://git.yoqi.me/lyq/flutter_audio_recorder into main

lyq.me@qq.com 1 year ago
parent
commit
f6aa5c2c9d

+ 9 - 0
android/app/build.gradle

@@ -22,11 +22,16 @@ if (flutterVersionName == null) {
 }
 
 apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
 apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
 
 android {
     compileSdkVersion 31
 
+    sourceSets {
+        main.java.srcDirs += 'src/main/kotlin'
+    }
+
     compileOptions {
         sourceCompatibility JavaVersion.VERSION_1_8
         targetCompatibility JavaVersion.VERSION_1_8
@@ -53,3 +58,7 @@ android {
 flutter {
     source '../..'
 }
+
+dependencies {
+    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+}

+ 357 - 0
android/app/src/main/kotlin/me/yoqi/flutter/flutter_audio_record/MainActivity.kt

@@ -0,0 +1,357 @@
+package com.example.flutter_audio_record
+
+import android.Manifest
+import android.content.pm.PackageManager
+import android.media.AudioFormat
+import android.media.AudioRecord
+import android.media.MediaRecorder
+import android.os.Build
+import android.util.Log
+import androidx.annotation.NonNull
+import androidx.core.app.ActivityCompat
+import androidx.core.content.ContextCompat
+import io.flutter.embedding.android.FlutterActivity
+import io.flutter.embedding.engine.FlutterEngine
+import io.flutter.plugin.common.MethodCall
+import io.flutter.plugin.common.MethodChannel
+import io.flutter.plugin.common.MethodChannel.Result
+import io.flutter.plugin.common.PluginRegistry
+import java.io.*
+import java.nio.ByteBuffer
+import java.nio.ByteOrder
+import java.util.*
+
/**
 * Flutter host activity implementing the "flutter_audio_recorder2" MethodChannel.
 *
 * Records 16-bit mono PCM from the microphone via [AudioRecord], streams it to a
 * headerless temp file on a background thread, and on "stop" rewrites it with a
 * canonical 44-byte WAV header.
 *
 * NOTE(review): the declared package (`com.example.flutter_audio_record`) does not
 * match the file's directory (`me/yoqi/flutter/flutter_audio_record`) — confirm
 * against AndroidManifest.xml before renaming either side.
 */
class MainActivity : FlutterActivity() {
    private val LOG_NAME = "AndroidAudioRecorder"
    private val PERMISSIONS_REQUEST_RECORD_AUDIO = 200
    private val RECORDER_BPP: Byte = 16 // bits per sample (16-bit PCM)

    private var mSampleRate = 16000 // Hz
    private var mRecorder: AudioRecord? = null
    private var mFilePath: String? = null
    private var mExtension: String? = null
    private var bufferSize = 1024
    private var mFileOutputStream: FileOutputStream? = null
    private var mStatus = "unset" // unset | initialized | recording | paused | stopped
    private var mPeakPower = -120.0
    private var mAveragePower = -120.0
    private var mRecordingThread: Thread? = null
    private var mDataSize: Long = 0 // raw PCM bytes written so far
    private var _result: Result? = null

    // FIX: was `String?` — MethodChannel requires a non-null channel name.
    private val CHANNEL: String = "flutter_audio_recorder2"

    override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) {
        super.configureFlutterEngine(flutterEngine)
        MethodChannel(flutterEngine.dartExecutor.binaryMessenger, CHANNEL).setMethodCallHandler { call, result ->
            _result = result
            when (call.method) {
                "hasPermissions" -> handleHasPermission()
                "init" -> handleInit(call, result)
                "current" -> handleCurrent(call, result)
                "start" -> handleStart(call, result)
                "pause" -> handlePause(call, result)
                "resume" -> handleResume(call, result)
                "stop" -> handleStop(call, result)
                else -> result.notImplemented()
            }
        }
    }

    /**
     * Delivers the permission-request outcome back over the pending channel [_result].
     *
     * FIX: the original declared a non-override `onRequestPermissionsResult(...): Boolean`
     * with a different signature, so the framework never invoked it and "hasPermissions"
     * never completed after prompting. This is the proper Activity override.
     */
    override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<String>, grantResults: IntArray) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults)
        if (requestCode == PERMISSIONS_REQUEST_RECORD_AUDIO) {
            val granted = grantResults.isNotEmpty() &&
                    grantResults.all { it == PackageManager.PERMISSION_GRANTED }
            Log.d(LOG_NAME, "onRequestPermissionsResult -$granted")
            _result?.success(granted)
        } else {
            Log.d(LOG_NAME, "onRequestPermissionsResult - false")
        }
    }

    /**
     * True when the runtime permissions needed for recording are granted.
     *
     * FIX: the original routed every check through a `registrar` field that was
     * always null, so this could never return true; the activity itself is the
     * Context, so use `this`. Also replaces referential `===` with `==`.
     */
    private fun hasRecordPermission(): Boolean {
        val micGranted = ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) ==
                PackageManager.PERMISSION_GRANTED
        // Before Marshmallow permissions are granted at install time; no storage check needed.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) return micGranted
        return micGranted && ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) ==
                PackageManager.PERMISSION_GRANTED
    }

    /** Answers "hasPermissions": replies true immediately if granted, else prompts the user. */
    private fun handleHasPermission() {
        if (hasRecordPermission()) {
            Log.d(LOG_NAME, "handleHasPermission true")
            _result?.success(true)
            return
        }
        Log.d(LOG_NAME, "handleHasPermission false")
        val permissions = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            arrayOf(Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE)
        } else {
            arrayOf(Manifest.permission.RECORD_AUDIO)
        }
        // The channel reply is delivered asynchronously via onRequestPermissionsResult.
        ActivityCompat.requestPermissions(this, permissions, PERMISSIONS_REQUEST_RECORD_AUDIO)
    }

    /** Answers "init": stores path/extension/sampleRate and sizes the capture buffer. */
    private fun handleInit(call: MethodCall, result: Result) {
        resetRecorder()
        // FIX: tolerate a missing/malformed argument instead of crashing in parseInt.
        mSampleRate = call.argument<String>("sampleRate")?.toIntOrNull() ?: 16000
        mFilePath = call.argument("path")
        mExtension = call.argument("extension")
        bufferSize = AudioRecord.getMinBufferSize(
            mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
        mStatus = "initialized"
        val initResult = hashMapOf<String, Any>(
            "duration" to "0",
            "peakPower" to mPeakPower,
            "averagePower" to mAveragePower,
            "isMeteringEnabled" to true,
            "status" to mStatus
        )
        mFilePath?.let { initResult["path"] = it }
        mExtension?.let { initResult["audioFormat"] = it }
        result.success(initResult)
    }

    /** Answers "current": duration (ms), live file path, metering levels, status. */
    private fun handleCurrent(call: MethodCall, result: Result) {
        val currentResult = hashMapOf<String, Any>(
            "duration" to getDuration() * 1000,
            "peakPower" to mPeakPower,
            "averagePower" to mAveragePower,
            "isMeteringEnabled" to true,
            "status" to mStatus
        )
        // FIX: the original `if (...) mFilePath else getTempFilename()?.let { put }`
        // attached `?.let` to getTempFilename() alone and discarded the if-result,
        // so "path" was never populated once stopped.
        val path = if (mStatus == "stopped") mFilePath else getTempFilename()
        path?.let { currentResult["path"] = it }
        result.success(currentResult)
    }

    /** Answers "start": opens the temp file, starts [AudioRecord] and the drain thread. */
    private fun handleStart(call: MethodCall, result: Result) {
        mRecorder = AudioRecord(MediaRecorder.AudioSource.MIC, mSampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize)
        try {
            mFileOutputStream = FileOutputStream(getTempFilename())
        } catch (e: FileNotFoundException) {
            result.error("", "cannot find the file", null)
            return
        }
        mRecorder!!.startRecording()
        mStatus = "recording"
        startThread()
        result.success(null)
    }

    /** Spawns the background thread that drains PCM from the recorder to disk. */
    private fun startThread() {
        mRecordingThread = Thread({ processAudioStream() }, "Audio Processing Thread")
            .also { it.start() }
    }

    /** Answers "pause": stops capture but keeps the temp file open so resume can append. */
    private fun handlePause(call: MethodCall, result: Result) {
        mStatus = "paused" // flips the drain loop's condition, ending the thread
        mPeakPower = -120.0
        mAveragePower = -120.0
        mRecorder?.stop()
        mRecordingThread = null
        result.success(null)
    }

    /** Answers "resume": restarts capture and the drain thread. */
    private fun handleResume(call: MethodCall, result: Result) {
        mStatus = "recording"
        mRecorder?.startRecording()
        startThread()
        result.success(null)
    }

    /**
     * Answers "stop": releases the recorder, closes the temp file, writes the
     * WAV-headered output file, deletes the temp file, and returns the final state.
     */
    private fun handleStop(call: MethodCall, result: Result) {
        if (mStatus == "stopped") {
            result.success(null)
            return
        }
        mStatus = "stopped"
        // Snapshot the result map before resetRecorder() zeroes the counters.
        val currentResult = hashMapOf<String, Any>(
            "duration" to getDuration() * 1000,
            "peakPower" to mPeakPower,
            "averagePower" to mAveragePower,
            "isMeteringEnabled" to true,
            "status" to mStatus
        )
        mFilePath?.let { currentResult["path"] = it }
        mExtension?.let { currentResult["audioFormat"] = it }
        resetRecorder()
        mRecordingThread = null
        mRecorder?.stop()
        mRecorder?.release() // FIX: was mRecorder!!.release() — NPE if start never ran
        try {
            mFileOutputStream?.close()
        } catch (e: IOException) {
            e.printStackTrace()
        }
        Log.d(LOG_NAME, "before adding the wav header")
        mFilePath?.let { copyWaveFile(getTempFilename(), it) } // FIX: was mFilePath!!
        deleteTempFile()
        result.success(currentResult)
    }

    /** Drain loop run on [mRecordingThread] while status is "recording". */
    private fun processAudioStream() {
        Log.d(LOG_NAME, "processing the stream: $mStatus")
        val buffer = ByteArray(bufferSize)
        while (mStatus == "recording") { // FIX: was referential `===` on strings
            val bytesRead = mRecorder?.read(buffer, 0, buffer.size) ?: 0
            // FIX: the original counted and wrote the full buffer even on short or
            // error reads; honour AudioRecord.read's return value instead.
            if (bytesRead <= 0) continue
            mDataSize += bytesRead
            updatePowers(buffer)
            try {
                mFileOutputStream?.write(buffer, 0, bytesRead)
            } catch (e: IOException) {
                e.printStackTrace()
            }
        }
    }

    /** Removes the headerless temp PCM file once the WAV copy exists. */
    private fun deleteTempFile() {
        val file = File(getTempFilename())
        if (file.exists()) file.delete()
    }

    /** Temp file that receives raw PCM before the WAV header is prepended. */
    private fun getTempFilename(): String = "$mFilePath.temp"

    /** Copies raw PCM [inFilename] to [outFilename] with a 44-byte WAV header prepended. */
    private fun copyWaveFile(inFilename: String, outFilename: String) {
        val channels = 1
        val byteRate = (RECORDER_BPP * mSampleRate * channels / 8).toLong()
        val data = ByteArray(bufferSize)
        try {
            FileInputStream(inFilename).use { input ->
                FileOutputStream(outFilename).use { output ->
                    val totalAudioLen = input.channel.size()
                    val totalDataLen = totalAudioLen + 36 // RIFF chunk size = data + 36
                    WriteWaveFileHeader(output, totalAudioLen, totalDataLen,
                        mSampleRate.toLong(), channels, byteRate)
                    while (true) {
                        // FIX: was `!== -1` — referential comparison of boxed Ints.
                        val read = input.read(data)
                        if (read == -1) break
                        output.write(data, 0, read) // FIX: was writing the whole buffer
                    }
                }
            }
        } catch (e: FileNotFoundException) {
            e.printStackTrace()
        } catch (e: IOException) {
            e.printStackTrace()
        }
    }

    /**
     * Writes the canonical 44-byte little-endian WAV/RIFF header.
     *
     * FIX: block align (byte offset 32) was hard-coded to 1; for 16-bit audio it
     * must be channels * bitsPerSample / 8 (= 2 for mono), otherwise strict WAV
     * readers misparse the file.
     */
    @kotlin.Throws(IOException::class)
    private fun WriteWaveFileHeader(out: FileOutputStream?, totalAudioLen: Long,
                                    totalDataLen: Long, longSampleRate: Long, channels: Int, byteRate: Long) {
        val header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN)
        header.put("RIFF".toByteArray(Charsets.US_ASCII))
        header.putInt(totalDataLen.toInt())            // RIFF chunk size
        header.put("WAVE".toByteArray(Charsets.US_ASCII))
        header.put("fmt ".toByteArray(Charsets.US_ASCII))
        header.putInt(16)                              // 'fmt ' sub-chunk size
        header.putShort(1)                             // audio format 1 = PCM
        header.putShort(channels.toShort())
        header.putInt(longSampleRate.toInt())
        header.putInt(byteRate.toInt())
        header.putShort((channels * RECORDER_BPP / 8).toShort()) // block align
        header.putShort(RECORDER_BPP.toShort())        // bits per sample
        header.put("data".toByteArray(Charsets.US_ASCII))
        header.putInt(totalAudioLen.toInt())           // 'data' sub-chunk size
        out?.write(header.array(), 0, 44)
    }

    /** Reinterprets little-endian PCM bytes as 16-bit samples. */
    private fun byte2short(bData: ByteArray): ShortArray {
        val out = ShortArray(bData.size / 2)
        ByteBuffer.wrap(bData).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(out)
        return out
    }

    /** Clears metering levels and the byte counter ahead of a new recording. */
    private fun resetRecorder() {
        mPeakPower = -120.0
        mAveragePower = -120.0
        mDataSize = 0
    }

    /**
     * Derives peak/average power from the buffer's last sample, scaled to
     * approximate iOS metering levels.
     *
     * FIX: the original `Arrays.asList(escapeStatusList).contains(mStatus)` wrapped
     * the whole array as a single list element, so the escape check never matched.
     */
    private fun updatePowers(bdata: ByteArray) {
        val samples = byte2short(bdata)
        if (samples.isEmpty()) return // FIX: guard the last-element access
        val sampleVal = samples[samples.size - 1].toInt()
        val escapeStatuses = setOf("paused", "stopped", "initialized", "unset")
        mAveragePower = if (sampleVal == 0 || mStatus in escapeStatuses) {
            -120.0 // match iOS silent level
        } else {
            val iOSFactor = 0.25 // empirical factor to match iOS power levels
            20 * Math.log(Math.abs(sampleVal) / 32768.0) * iOSFactor
        }
        mPeakPower = mAveragePower
    }

    /** Elapsed recording time in whole seconds: bytes / (rate * 2 bytes * 1 channel). */
    private fun getDuration(): Int = (mDataSize / (mSampleRate * 2 * 1)).toInt()
}

+ 3 - 0
android/build.gradle

@@ -1,4 +1,6 @@
 buildscript {
+    ext.kotlin_version = '1.4.32'
+
     repositories {
         google()
         mavenCentral()
@@ -6,6 +8,7 @@ buildscript {
 
     dependencies {
         classpath 'com.android.tools.build:gradle:4.1.0'
+        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
     }
 }
 

+ 19 - 13
lib/pages/home_page.dart

@@ -22,12 +22,6 @@ class _HomePageState extends State<HomePage> {
   @override
   Widget build(BuildContext context) {
     return Scaffold(
-      floatingActionButton: FloatingActionButton(
-        onPressed: () {
-          showRecord(context);
-        },
-        child: const Icon(Icons.mic),
-      ),
       appBar: AppBar(
         title: const Text(
           "录音机App",
@@ -35,19 +29,31 @@ class _HomePageState extends State<HomePage> {
         ),
         centerTitle: true,
       ),
-      body: Column(
+      floatingActionButton: FloatingActionButton(
+        onPressed: () {
+          showRecord(context);
+        },
+        child: const Icon(Icons.mic),
+      ),
+      body: Stack(
         children: [
-          Expanded(
-            flex: 2,
-            child: RecordList(
-              records: records!,
-            ),
+          Center(child: Text("天问科技")),
+          Column(
+            children: [
+              Expanded(
+                flex: 2,
+                child: RecordList(
+                  records: records!,
+                ),
+              ),
+            ],
           ),
         ],
       ),
     );
   }
 
+  /// Callback invoked after a recording is saved: reloads the record list.
   _onFinish() {
     records!.clear();
     appDir!.list().listen((onData) {
@@ -68,7 +74,7 @@ class _HomePageState extends State<HomePage> {
           height: 200,
           color: Colors.white70,
           child: RecorderView(
-            save: _onFinish,
+            saveVoice: _onFinish,
           ),
         );
       },

+ 281 - 0
lib/plugins/audio_recorder.dart

@@ -0,0 +1,281 @@
+import 'dart:async';
+import 'dart:io';
+
+import 'package:file/local.dart';
+import 'package:flutter/services.dart';
+import 'package:path/path.dart' as p;
+
+/// Audio Recorder Plugin
/// Thin Dart wrapper over the native "flutter_audio_recorder2" MethodChannel.
///
/// Lifecycle: construct -> await [initialized] -> [start] ->
/// ([pause]/[resume])* -> [stop]. Poll [current] for progress and metering
/// while recording.
class FlutterAudioRecorder {
  static const MethodChannel _channel =
      MethodChannel('flutter_audio_recorder2');

  /// Extension used when the caller's path carries no recognised audio extension.
  static const String DEFAULT_EXTENSION = '.m4a';
  static LocalFileSystem fs = LocalFileSystem();

  String? _path;
  String? _extension;
  Recording? _recording;
  String? _sampleRate;

  Future? _initRecorder;

  /// Completes when the platform side has acknowledged `init`.
  Future? get initialized => _initRecorder;

  /// Latest known state of the recording (refreshed by [current] and [stop]).
  Recording? get recording => _recording;

  /// [path] is the target audio file; its extension may be rewritten so it
  /// agrees with [audioFormat] (an explicit format wins over the path's
  /// extension). [sampleRate] is passed to the platform side as a string.
  FlutterAudioRecorder(String path,
      {AudioFormat? audioFormat, String sampleRate = "16000"}) {
    _initRecorder = _init(path, audioFormat, sampleRate);
  }

  /// Validates and normalises [path], then asks the platform side to initialise.
  ///
  /// Throws if a file already exists at [path] or its parent directory is
  /// missing.
  Future _init(
      String? path, AudioFormat? audioFormat, String sampleRate) async {
    String extension;
    if (path != null) {
      final extensionInPath = p.extension(path);
      if (audioFormat != null) {
        // An explicit AudioFormat overrides whatever extension the path has.
        if (_stringToAudioFormat(extensionInPath) != audioFormat) {
          extension = _audioFormatToString(audioFormat);
          path = p.withoutExtension(path) + extension;
        } else {
          extension = extensionInPath;
        }
      } else if (_isValidAudioFormat(extensionInPath)) {
        // No format given: keep the path's extension when it is supported.
        extension = extensionInPath;
      } else {
        extension = DEFAULT_EXTENSION;
        path += extension;
      }
      File file = fs.file(path);
      if (await file.exists()) {
        throw Exception("A file already exists at the path :" + path);
      } else if (!await file.parent.exists()) {
        throw Exception(
            "The specified parent directory does not exist ${file.parent}");
      }
    } else {
      extension = DEFAULT_EXTENSION;
    }
    _path = path;
    _extension = extension;
    _sampleRate = sampleRate;

    var result = await _channel.invokeMethod('init',
        {"path": _path, "extension": _extension, "sampleRate": _sampleRate});

    // FIX: `response` was declared `late` and left unassigned when the platform
    // returned false, making the reads below throw LateInitializationError.
    final Map<String, Object?> response = result is Map
        ? Map<String, Object?>.from(result)
        : <String, Object?>{};

    _recording = Recording()
      ..status = _stringToRecordingStatus(response['status'] as String?)
      ..metering = AudioMetering(
          averagePower: -120, peakPower: -120, isMeteringEnabled: true);

    return;
  }

  /// Starts recording; the platform side begins writing audio to disk.
  Future start() async {
    return _channel.invokeMethod('start');
  }

  /// Pauses an active recording. Use [current] for the refreshed state.
  Future pause() async {
    return _channel.invokeMethod('pause');
  }

  /// Resumes a paused recording.
  Future resume() async {
    return _channel.invokeMethod('resume');
  }

  /// Stops and finalises the recording; afterwards start/pause/resume are
  /// no longer valid for this instance.
  Future<Recording?> stop() async {
    var result = await _channel.invokeMethod('stop');
    if (result != null) {
      _responseToRecording(Map<String, Object>.from(result));
    }
    return _recording;
  }

  /// Polls the platform side for duration, metering levels and status.
  Future<Recording?> current({int channel = 0}) async {
    var result = await _channel.invokeMethod('current', {"channel": channel});
    if (result != null && _recording?.status != RecordingStatus.Stopped) {
      _responseToRecording(Map<String, Object>.from(result));
    }
    return _recording;
  }

  /// Whether record permission is granted; when undetermined (first launch)
  /// this prompts the user.
  static Future<bool?> get hasPermissions async {
    bool? hasPermission = await _channel.invokeMethod('hasPermissions');
    return hasPermission;
  }

  /// Copies a platform response map into [_recording].
  void _responseToRecording(Map<String, Object>? response) {
    if (response == null) return;

    _recording!.duration = Duration(milliseconds: response['duration'] as int);
    _recording!.path = response['path'] as String?;
    _recording!.audioFormat =
        _stringToAudioFormat(response['audioFormat'] as String?);
    _recording!.extension = response['audioFormat'] as String?;
    _recording!.metering = AudioMetering(
        peakPower: response['peakPower'] as double?,
        averagePower: response['averagePower'] as double?,
        isMeteringEnabled: response['isMeteringEnabled'] as bool?);
    _recording!.status =
        _stringToRecordingStatus(response['status'] as String?);
  }

  /// Whether [extension] is one of the supported audio extensions.
  static bool _isValidAudioFormat(String extension) {
    switch (extension) {
      case ".wav":
      case ".mp4":
      case ".aac":
      case ".m4a":
        return true;
      default:
        return false;
    }
  }

  /// Maps a file extension to an [AudioFormat]; null when unsupported.
  static AudioFormat? _stringToAudioFormat(String? extension) {
    switch (extension) {
      case ".wav":
        return AudioFormat.WAV;
      case ".mp4":
      case ".aac":
      case ".m4a":
        return AudioFormat.AAC;
      default:
        return null;
    }
  }

  /// Maps an [AudioFormat] to its canonical file extension.
  static String _audioFormatToString(AudioFormat format) {
    switch (format) {
      case AudioFormat.WAV:
        return ".wav";
      case AudioFormat.AAC:
        return ".m4a";
      default:
        return ".m4a";
    }
  }

  /// Maps a platform status string to a [RecordingStatus] (Unset on unknown).
  static RecordingStatus _stringToRecordingStatus(String? status) {
    switch (status) {
      case "unset":
        return RecordingStatus.Unset;
      case "initialized":
        return RecordingStatus.Initialized;
      case "recording":
        return RecordingStatus.Recording;
      case "paused":
        return RecordingStatus.Paused;
      case "stopped":
        return RecordingStatus.Stopped;
      default:
        return RecordingStatus.Unset;
    }
  }
}
+
+/// Recording Object - represent a recording file
+class Recording {
+  /// File path
+  String? path;
+
+  /// Extension
+  String? extension;
+
+  /// Duration in milliseconds
+  Duration? duration;
+
+  /// Audio format
+  AudioFormat? audioFormat;
+
+  /// Metering
+  AudioMetering? metering;
+
+  /// Is currently recording
+  RecordingStatus? status;
+}
+
+/// Audio Metering Level - describe the metering level of microphone when recording
+class AudioMetering {
+  /// Represent peak level of given short duration
+  double? peakPower;
+
+  /// Represent average level of given short duration
+  double? averagePower;
+
+  /// Is metering enabled in system
+  bool? isMeteringEnabled;
+
+  AudioMetering({this.peakPower, this.averagePower, this.isMeteringEnabled});
+}
+
/// Lifecycle states of a recording session.
enum RecordingStatus {
  /// Not initialised yet.
  Unset,

  /// Initialised and ready to start recording.
  Initialized,

  /// Actively capturing audio.
  Recording,

  /// Temporarily paused; may be resumed.
  Paused,

  /// Finalised; this specific recording cannot be started again.
  Stopped,
}
+
/// Supported audio encodings.
///
/// WAV is lossless audio and is the recommended choice.
enum AudioFormat {
  AAC,
  WAV,
}

+ 17 - 0
lib/plugins/generated_plugin_registrant.dart

@@ -0,0 +1,17 @@
+//
+// Generated file. Do not edit.
+//
+
+// ignore_for_file: lines_longer_than_80_chars
+
+import 'package:audioplayers/web/audioplayers_web.dart';
+import 'package:fluttertoast/fluttertoast_web.dart';
+
+import 'package:flutter_web_plugins/flutter_web_plugins.dart';
+
+// ignore: public_member_api_docs
+void registerPlugins(Registrar registrar) {
+  AudioplayersPlugin.registerWith(registrar);
+  FluttertoastWebPlugin.registerWith(registrar);
+  registrar.registerMessageHandler();
+}

+ 1 - 1
lib/views/record_list.dart

@@ -183,7 +183,7 @@ class _Presso extends StatelessWidget {
   Widget build(BuildContext context) {
     return ButtonTheme(
       minWidth: 48.0,
-      child: RaisedButton(
+      child: ElevatedButton(
           child: Icon(
             ico,
             color: Colors.white,

+ 48 - 29
lib/views/recorder.dart

@@ -2,14 +2,16 @@ import 'dart:async';
 import 'dart:io';
 
 import 'package:flutter/material.dart';
+import 'package:flutter_audio_recorder/plugins/audio_recorder.dart';
 import 'package:fluttertoast/fluttertoast.dart';
 import 'package:path_provider/path_provider.dart';
 import 'package:permission_handler/permission_handler.dart';
 
+/// Recorder bottom-sheet view shown from the home page.
 class RecorderView extends StatefulWidget {
-  final Function save;
+  final Function saveVoice;
 
-  const RecorderView({Key? key, required this.save}) : super(key: key);
+  const RecorderView({Key? key, required this.saveVoice}) : super(key: key);
 
   @override
   _RecorderViewState createState() => _RecorderViewState();
@@ -18,7 +20,11 @@ class RecorderView extends StatefulWidget {
 class _RecorderViewState extends State<RecorderView> {
   IconData _recordIcon = Icons.mic_none;
   MaterialColor colo = Colors.orange;
-     _currentStatus = RecordingStatus.Unset;
+
+  /// 录音状态
+  RecordingStatus _currentStatus = RecordingStatus.Unset;
+
+  /// 是否录音停止
   bool stop = false;
   Recording? _current;
 
@@ -44,9 +50,12 @@ class _RecorderViewState extends State<RecorderView> {
     ].request();
     //bool hasPermission = await FlutterAudioRecorder.hasPermissions ?? false;
     if (statuses[Permission.microphone] == PermissionStatus.granted) {
+      /// 状态改为已初始化
       _currentStatus = RecordingStatus.Initialized;
       _recordIcon = Icons.mic;
-    } else {}
+    } else {
+      print("权限为获取");
+    }
   }
 
   @override
@@ -67,27 +76,27 @@ class _RecorderViewState extends State<RecorderView> {
               height: 20,
             ),
             Text(
-              (_current == null)
-                  ? "0:0:0:0"
-                  : _current!.duration.toString(),
+              (_current == null) ? "0:0:0:0" : _current!.duration.toString(),
               style: const TextStyle(color: Colors.black, fontSize: 20),
             ),
             const SizedBox(
               height: 20,
             ),
             stop == false
-                ? RaisedButton(
-                    color: Colors.orange,
-                    onPressed: () async {
-                      await _onRecordButtonPressed();
+                ? ElevatedButton(
+                    style: ButtonStyle(
+                        shape: MaterialStateProperty.all(RoundedRectangleBorder(
+                          borderRadius: BorderRadius.circular(10),
+                        )),
+                        textStyle: MaterialStateProperty.all(
+                            const TextStyle(color: Colors.orange))),
+                    onPressed: () {
+                      _onRecordButtonPressed();
                       setState(() {});
                     },
-                    shape: RoundedRectangleBorder(
-                      borderRadius: BorderRadius.circular(10),
-                    ),
                     child: Column(
                       children: [
-                        Container(
+                        SizedBox(
                           width: 80,
                           height: 80,
                           child: Icon(
@@ -111,15 +120,19 @@ class _RecorderViewState extends State<RecorderView> {
                     child: Row(
                       mainAxisAlignment: MainAxisAlignment.spaceBetween,
                       children: [
-                        RaisedButton(
-                          color: colo,
+                        ElevatedButton(
+                          style: ButtonStyle(
+                              textStyle: MaterialStateProperty.all(
+                                  TextStyle(color: colo)),
+                              shape: MaterialStateProperty.all(
+                                RoundedRectangleBorder(
+                                  borderRadius: BorderRadius.circular(10),
+                                ),
+                              )),
                           onPressed: () async {
                             await _onRecordButtonPressed();
                             setState(() {});
                           },
-                          shape: RoundedRectangleBorder(
-                            borderRadius: BorderRadius.circular(10),
-                          ),
                           child: Container(
                             width: 80,
                             height: 80,
@@ -130,18 +143,23 @@ class _RecorderViewState extends State<RecorderView> {
                             ),
                           ),
                         ),
-                        RaisedButton(
-                          color: Colors.orange,
+                        ElevatedButton(
+                          style: ButtonStyle(
+                              shape: MaterialStateProperty.all(
+                                RoundedRectangleBorder(
+                                  borderRadius: BorderRadius.circular(10),
+                                ),
+                              ),
+                              textStyle: MaterialStateProperty.all(TextStyle(
+                                color: Colors.orange,
+                              ))),
                           onPressed: _currentStatus != RecordingStatus.Unset
                               ? _stop
                               : null,
-                          shape: RoundedRectangleBorder(
-                            borderRadius: BorderRadius.circular(10),
-                          ),
                           child: Container(
                             width: 80,
                             height: 80,
-                            child: Icon(
+                            child: const Icon(
                               Icons.stop,
                               color: Colors.white,
                               size: 50,
@@ -157,7 +175,7 @@ class _RecorderViewState extends State<RecorderView> {
     );
   }
 
-  Future<void> _onRecordButtonPressed() async {
+  _onRecordButtonPressed() async {
     switch (_currentStatus) {
       case RecordingStatus.Initialized:
         {
@@ -180,6 +198,7 @@ class _RecorderViewState extends State<RecorderView> {
           break;
         }
       default:
+        print("---------weizb");
         break;
     }
   }
@@ -209,7 +228,7 @@ class _RecorderViewState extends State<RecorderView> {
       _current = recording!;
     });
 
-    const tick = const Duration(milliseconds: 50);
+    const tick = Duration(milliseconds: 50);
     Timer.periodic(tick, (Timer t) async {
       if (_currentStatus == RecordingStatus.Stopped) {
         t.cancel();
@@ -245,7 +264,7 @@ class _RecorderViewState extends State<RecorderView> {
   _stop() async {
     var result = await audioRecorder!.stop();
     Fluttertoast.showToast(msg: "Stop Recording , File Saved");
-    widget.save();
+    widget.saveVoice();
     setState(() {
       _current = result!;
       _currentStatus = _current!.status!;

+ 14 - 0
pubspec.lock

@@ -163,6 +163,13 @@ packages:
       url: "https://pub.flutter-io.cn"
     source: hosted
     version: "1.7.0"
+  mime:
+    dependency: transitive
+    description:
+      name: mime
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "1.0.1"
   path:
     dependency: transitive
     description:
@@ -254,6 +261,13 @@ packages:
       url: "https://pub.flutter-io.cn"
     source: hosted
     version: "4.2.4"
+  share:
+    dependency: "direct main"
+    description:
+      name: share
+      url: "https://pub.flutter-io.cn"
+    source: hosted
+    version: "2.0.4"
   sky_engine:
     dependency: transitive
     description: flutter

+ 2 - 2
pubspec.yaml

@@ -1,7 +1,7 @@
 name: flutter_audio_recorder
 description: A new Flutter application.
 publish_to: 'none' # Remove this line if you wish to publish to pub.dev
-version: 1.0.0+1
+version: 1.1.0+1
 
 environment:
   sdk: ">=2.12.0 <3.0.0"
@@ -16,7 +16,7 @@ dependencies:
   audioplayers: ^0.20.1
 
 #  rflutter_alert: ^2.0.2
-#  share: ^2.0.4
+  share: ^2.0.4
 
 dev_dependencies:
   flutter_test: