diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt b/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt index 7420e09d..aa725780 100644 --- a/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt +++ b/android/src/main/kotlin/com/simform/audio_waveforms/AudioRecorder.kt @@ -66,18 +66,18 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener { fun stopRecording(result: MethodChannel.Result, recorder: MediaRecorder?, path: String) { try { - val audioInfoArrayList = ArrayList<String?>() - + val hashMap : HashMap<String, Any?> = HashMap() try { recorder?.stop() val duration = getDuration(path) - audioInfoArrayList.add(path) - audioInfoArrayList.add(duration) + + hashMap[Constants.resultFilePath] = path + hashMap[Constants.resultDuration] = duration } catch (e: RuntimeException) { // Stop was called immediately after start which causes stop() call to fail. - audioInfoArrayList.add(null) - audioInfoArrayList.add("-1") + hashMap[Constants.resultFilePath] = null + hashMap[Constants.resultDuration] = "-1" } recorder?.apply { @@ -85,24 +85,24 @@ class AudioRecorder : PluginRegistry.RequestPermissionsResultListener { release() } - result.success(audioInfoArrayList) + result.success(hashMap) } catch (e: IllegalStateException) { Log.e(LOG_TAG, "Failed to stop recording") } } - private fun getDuration(path: String): String { + private fun getDuration(path: String): Int { val mediaMetadataRetriever = MediaMetadataRetriever() try { mediaMetadataRetriever.setDataSource(path) val duration = mediaMetadataRetriever.extractMetadata(METADATA_KEY_DURATION) - return duration ?: "-1" + return duration?.toInt() ?: -1 } catch (e: Exception) { Log.e(LOG_TAG, "Failed to get recording duration") } finally { mediaMetadataRetriever.release() } - return "-1" + return -1 } fun startRecorder(result: MethodChannel.Result, recorder: MediaRecorder?, useLegacy: Boolean) { diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt 
b/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt index 075ae28a..1632b35e 100644 --- a/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt +++ b/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt @@ -66,6 +66,9 @@ object Constants { const val waveformData = "waveformData" const val useLegacyNormalization = "useLegacyNormalization" const val updateFrequency = "updateFrequency" + + const val resultFilePath = "resultFilePath" + const val resultDuration = "resultDuration" } enum class FinishMode(val value:Int) { diff --git a/ios/Classes/AudioRecorder.swift b/ios/Classes/AudioRecorder.swift index 8910c3c1..efee666e 100644 --- a/ios/Classes/AudioRecorder.swift +++ b/ios/Classes/AudioRecorder.swift @@ -62,26 +62,34 @@ public class AudioRecorder: NSObject, AVAudioRecorderDelegate{ audioRecorder?.stop() if(audioUrl != nil) { let asset = AVURLAsset(url: audioUrl!) + if #available(iOS 15.0, *) { Task { do { recordedDuration = try await asset.load(.duration) - result([path,Int(recordedDuration.seconds * 1000).description]) + sendResult(result, duration: Int(recordedDuration.seconds * 1000)) } catch let err { debugPrint(err.localizedDescription) - result([path,CMTime.zero.seconds.description]) + sendResult(result, duration: Int(CMTime.zero.seconds)) } } } else { recordedDuration = asset.duration - result([path,Int(recordedDuration.seconds * 1000).description]) + sendResult(result, duration: Int(recordedDuration.seconds * 1000)) } } else { - result([path,CMTime.zero.seconds.description]) + sendResult(result, duration: Int(CMTime.zero.seconds)) } audioRecorder = nil } + private func sendResult(_ result: @escaping FlutterResult, duration:Int){ + var params = [String:Any?]() + params[Constants.resultFilePath] = path + params[Constants.resultDuration] = duration + result(params) + } + public func pauseRecording(_ result: @escaping FlutterResult) { audioRecorder?.pause() result(false) diff --git a/ios/Classes/Utils.swift b/ios/Classes/Utils.swift 
index d691974d..24e44946 100644 --- a/ios/Classes/Utils.swift +++ b/ios/Classes/Utils.swift @@ -61,6 +61,8 @@ struct Constants { static let useLegacyNormalization = "useLegacyNormalization" static let updateFrequency = "updateFrequency" static let overrideAudioSession = "overrideAudioSession" + static let resultFilePath = "resultFilePath" + static let resultDuration = "resultDuration" } enum FinishMode : Int{ diff --git a/lib/src/base/audio_waveforms_interface.dart b/lib/src/base/audio_waveforms_interface.dart index 84b3a1ab..492aa8b1 100644 --- a/lib/src/base/audio_waveforms_interface.dart +++ b/lib/src/base/audio_waveforms_interface.dart @@ -65,10 +65,10 @@ class AudioWaveformsInterface { } ///platform call to stop recording - Future<List<String?>?> stop() async { - final audioInfo = + Future<Map<String, dynamic>> stop() async { + Map<dynamic, dynamic> audioInfo = await _methodChannel.invokeMethod(Constants.stopRecording); - return List.from(audioInfo ?? []); + return audioInfo.cast<String, dynamic>(); } ///platform call to resume recording. diff --git a/lib/src/base/constants.dart b/lib/src/base/constants.dart index 46f6dbd3..1332c012 100644 --- a/lib/src/base/constants.dart +++ b/lib/src/base/constants.dart @@ -48,4 +48,6 @@ class Constants { static const String useLegacyNormalization = "useLegacyNormalization"; static const String updateFrequency = "updateFrequency"; static const String overrideAudioSession = "overrideAudioSession"; + static const String resultFilePath = "resultFilePath"; + static const String resultDuration = "resultDuration"; } diff --git a/lib/src/controllers/recorder_controller.dart b/lib/src/controllers/recorder_controller.dart index 109fcd86..23547776 100644 --- a/lib/src/controllers/recorder_controller.dart +++ b/lib/src/controllers/recorder_controller.dart @@ -2,6 +2,7 @@ import 'dart:async'; import 'dart:io' show Platform; import 'dart:math' show max; +import 'package:audio_waveforms/src/base/constants.dart'; import 'package:flutter/material.dart'; import '/src/base/utils.dart'; @@ -310,24 +311,19 @@ 
class RecorderController extends ChangeNotifier { Future<String?> stop([bool callReset = true]) async { if (_recorderState.isRecording || _recorderState.isPaused) { final audioInfo = await AudioWaveformsInterface.instance.stop(); - if (audioInfo != null) { - _isRecording = false; - _timer?.cancel(); - _recorderTimer?.cancel(); - if (audioInfo[1] != null) { - var duration = int.tryParse(audioInfo[1]!); - if (duration != null) { - _recordedDuration = Duration(milliseconds: duration); - _recordedFileDurationController.add(recordedDuration); - } - } - _elapsedDuration = Duration.zero; - _setRecorderState(RecorderState.stopped); - if (callReset) reset(); - return audioInfo[0]; - } else { - throw "Failed stop recording"; + _isRecording = false; + _timer?.cancel(); + _recorderTimer?.cancel(); + if (audioInfo[Constants.resultDuration] != null) { + var duration = audioInfo[Constants.resultDuration]; + + _recordedDuration = Duration(milliseconds: duration); + _recordedFileDurationController.add(recordedDuration); } + _elapsedDuration = Duration.zero; + _setRecorderState(RecorderState.stopped); + if (callReset) reset(); + return audioInfo[Constants.resultFilePath]; } notifyListeners();