diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2b675f11..2a20ed4b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 0.2.0
+
+- Reworked waveform extraction from audio files
+  - Breaking: removed the `readingComplete` PlayerState and `visualizerHeight`. In their place, added the `extractWaveformData` function to extract waveforms.
+  - Added `onCurrentExtractedWaveformData` and `onExtractionProgress` to monitor extraction progress and the currently extracted waveform data.
+
 ## 0.1.6
 - Fixed [#101](https://github.com/SimformSolutionsPvtLtd/audio_waveforms/issues/101)
 - Fixed setting volume for android throws error
diff --git a/android/build.gradle b/android/build.gradle
index a93d3215..563ac103 100644
--- a/android/build.gradle
+++ b/android/build.gradle
@@ -41,7 +41,7 @@ android {
     }
 
     defaultConfig {
-        minSdkVersion 16
+        minSdkVersion 21
     }
 }
diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/AudioPlayer.kt b/android/src/main/kotlin/com/simform/audio_waveforms/AudioPlayer.kt
index 8e785dfc..02e25fc2 100644
--- a/android/src/main/kotlin/com/simform/audio_waveforms/AudioPlayer.kt
+++ b/android/src/main/kotlin/com/simform/audio_waveforms/AudioPlayer.kt
@@ -1,15 +1,21 @@
 package com.simform.audio_waveforms
 
 import android.content.Context
+import android.os.Build
 import android.os.Handler
 import android.os.Looper
+import androidx.annotation.RequiresApi
 import com.google.android.exoplayer2.ExoPlayer
 import com.google.android.exoplayer2.MediaItem
 import com.google.android.exoplayer2.Player
 import io.flutter.plugin.common.MethodChannel
 import java.lang.Exception
 
-class AudioPlayer(context: Context, channel: MethodChannel, playerKey: String) {
+class AudioPlayer(
+    context: Context,
+    channel: MethodChannel,
+    playerKey: String
+) {
     private var handler: Handler = Handler(Looper.getMainLooper())
     private var runnable: Runnable? = null
     private var methodChannel = channel
@@ -19,6 +25,43 @@ class AudioPlayer(context: Context, channel: MethodChannel, playerKey: String) {
     private var isPlayerPrepared: Boolean = false
     private var finishMode = FinishMode.Stop
     private var key = playerKey
+    private var waveformExtractor: WaveformExtractor? = null
+    private var noOfSamples = 100
+
+    fun extractWaveform(
+        result: MethodChannel.Result,
+        path: String?,
+        noOfSamples: Int?,
+    ) {
+        if (path != null) {
+            this.noOfSamples = noOfSamples ?: 100
+            try {
+                waveformExtractor = WaveformExtractor(
+                    path = path,
+                    expectedPoints = this.noOfSamples,
+                    key = key,
+                    methodChannel = methodChannel,
+                    result = result,
+                    extractorCallBack = object : ExtractorCallBack {
+                        override fun onProgress(value: Float) {
+                            if (value == 1.0F) {
+                                result.success(waveformExtractor?.sampleData)
+                            }
+                        }
+                    }
+                )
+                // Decoding runs asynchronously via MediaCodec callbacks; the
+                // extractor releases itself when it reaches end of stream, so
+                // stop() must not be called here.
+                waveformExtractor?.startDecode()
+            } catch (e: Exception) {
+                result.error(
+                    Constants.LOG_TAG,
+                    "Cannot extract waveform data from the provided audio file path",
+                    e.toString()
+                )
+            }
+        }
+    }
 
     fun preparePlayer(
         result: MethodChannel.Result,
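The new `extractWaveform` entry point resolves its `MethodChannel.Result` only when the extractor's `onProgress` callback reports 1.0, so the Dart caller simply awaits one method-channel call. A minimal sketch of that Dart side, assuming an illustrative channel name (the real registration lives in `AudioWaveformsInterface`):

```dart
import 'package:flutter/services.dart';

// Channel name is illustrative, not the plugin's actual registration.
const MethodChannel _channel = MethodChannel('simform_audio_waveforms');

/// Asks the platform side to decode [path] and return one RMS value per
/// requested sample; completes when extraction progress reaches 1.0.
Future<List<double>> extractWaveform({
  required String playerKey,
  required String path,
  int noOfSamples = 100,
}) async {
  final result = await _channel.invokeMethod('extractWaveformData', {
    'playerKey': playerKey,
    'path': path,
    'noOfSamples': noOfSamples,
  });
  return List<double>.from(result ?? []);
}
```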
diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/AudioWaveformsPlugin.kt b/android/src/main/kotlin/com/simform/audio_waveforms/AudioWaveformsPlugin.kt
index 2b39731a..983b3831 100644
--- a/android/src/main/kotlin/com/simform/audio_waveforms/AudioWaveformsPlugin.kt
+++ b/android/src/main/kotlin/com/simform/audio_waveforms/AudioWaveformsPlugin.kt
@@ -139,6 +139,16 @@ class AudioWaveformsPlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
                     result.error(Constants.LOG_TAG, "Player key can't be null", "")
                 }
             }
+            Constants.extractWaveformData -> {
+                val key = call.argument(Constants.playerKey) as String?
+                val path = call.argument(Constants.path) as String?
+                val noOfSamples = call.argument(Constants.noOfSamples) as Int?
+                if (key != null) {
+                    audioPlayers[key]?.extractWaveform(result, path, noOfSamples)
+                } else {
+                    result.error(Constants.LOG_TAG, "Player key can't be null", "")
+                }
+            }
             Constants.stopAllPlayers -> {
                 for ((key, _) in audioPlayers) {
                     audioPlayers[key]?.stop(result)
@@ -198,7 +208,11 @@ class AudioWaveformsPlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
 
     private fun initPlayer(playerKey: String) {
         if (audioPlayers[playerKey] == null) {
-            val newPlayer = AudioPlayer(applicationContext, channel, playerKey)
+            val newPlayer = AudioPlayer(
+                context = applicationContext,
+                channel = channel,
+                playerKey = playerKey,
+            )
             audioPlayers[playerKey] = newPlayer
         }
         return
diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt b/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt
index bd7cb690..ecc3b84a 100644
--- a/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt
+++ b/android/src/main/kotlin/com/simform/audio_waveforms/Utils.kt
@@ -50,7 +50,6 @@ object Constants {
     const val volume = "volume"
     const val getDuration = "getDuration"
     const val durationType = "durationType"
-    const val seekToStart = "seekToStart"
     const val playerKey = "playerKey"
     const val current = "current"
     const val onCurrentDuration = "onCurrentDuration"
@@ -58,6 +57,11 @@ object Constants {
     const val onDidFinishPlayingAudio = "onDidFinishPlayingAudio"
     const val finishMode = "finishMode"
     const val finishType = "finishType"
+    const val extractWaveformData = "extractWaveformData"
+    const val noOfSamples = "noOfSamples"
+    const val onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData"
+    const val waveformData = "waveformData"
+    const val onExtractionProgressUpdate = "onExtractionProgressUpdate"
 }
 
 enum class FinishMode(val value:Int) {
diff --git a/android/src/main/kotlin/com/simform/audio_waveforms/WaveformExtractor.kt b/android/src/main/kotlin/com/simform/audio_waveforms/WaveformExtractor.kt
new file mode 100644
index 00000000..1a93544d
--- /dev/null
+++ b/android/src/main/kotlin/com/simform/audio_waveforms/WaveformExtractor.kt
@@ -0,0 +1,248 @@
+package com.simform.audio_waveforms
+
+import android.media.AudioFormat
+import android.media.MediaCodec
+import android.media.MediaExtractor
+import android.media.MediaFormat
+import android.os.Build
+import android.os.Handler
+import android.os.Looper
+import io.flutter.plugin.common.MethodChannel
+import java.nio.ByteBuffer
+import java.util.concurrent.CountDownLatch
+import kotlin.math.pow
+import kotlin.math.sqrt
+
+class WaveformExtractor(
+    private val path: String,
+    private val expectedPoints: Int,
+    private val key: String,
+    private val methodChannel: MethodChannel,
+    private val result: MethodChannel.Result,
+    private val extractorCallBack: ExtractorCallBack
+) {
+    private val handler = Handler(Looper.getMainLooper())
+    private var decoder: MediaCodec? = null
+    private var extractor: MediaExtractor? = null
+    private var duration = 0L
+    private var progress = 0F
+    private var currentProgress = 0F
+
+    @Volatile
+    private var started = false
+    private val finishCount = CountDownLatch(1)
+    private var inputEof = false
+    private var sampleRate = 0
+    private var channels = 1
+    private var pcmEncodingBit = 16
+    private var totalSamples = 0L
+    private var perSamplePoints = 0L
+
+    private fun getFormat(path: String): MediaFormat? {
+        val mediaExtractor = MediaExtractor()
+        this.extractor = mediaExtractor
+        mediaExtractor.setDataSource(path)
+        val trackCount = mediaExtractor.trackCount
+        repeat(trackCount) {
+            val format = mediaExtractor.getTrackFormat(it)
+            val mime = format.getString(MediaFormat.KEY_MIME) ?: ""
+            if (mime.contains("audio")) {
+                duration = format.getLong(MediaFormat.KEY_DURATION) / 1000000
+                mediaExtractor.selectTrack(it)
+                return format
+            }
+        }
+        return null
+    }
+
+    fun startDecode() {
+        try {
+            val format = getFormat(path) ?: error("No audio format found")
+            val mime = format.getString(MediaFormat.KEY_MIME) ?: error("No MIME type found")
+            decoder = MediaCodec.createDecoderByType(mime).also {
+                it.configure(format, null, null, 0)
+                it.setCallback(object : MediaCodec.Callback() {
+                    override fun onInputBufferAvailable(codec: MediaCodec, index: Int) {
+                        if (inputEof) return
+                        val extractor = extractor ?: return
+                        codec.getInputBuffer(index)?.let { buf ->
+                            val size = extractor.readSampleData(buf, 0)
+                            if (size > 0) {
+                                codec.queueInputBuffer(index, 0, size, extractor.sampleTime, 0)
+                                extractor.advance()
+                            } else {
+                                codec.queueInputBuffer(
+                                    index,
+                                    0,
+                                    0,
+                                    0,
+                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM
+                                )
+                                inputEof = true
+                            }
+                        }
+                    }
+
+                    override fun onOutputFormatChanged(codec: MediaCodec, format: MediaFormat) {
+                        sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE)
+                        channels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
+                        pcmEncodingBit = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
+                            if (format.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
+                                when (format.getInteger(MediaFormat.KEY_PCM_ENCODING)) {
+                                    AudioFormat.ENCODING_PCM_16BIT -> 16
+                                    AudioFormat.ENCODING_PCM_8BIT -> 8
+                                    AudioFormat.ENCODING_PCM_FLOAT -> 32
+                                    else -> 16
+                                }
+                            } else {
+                                16
+                            }
+                        } else {
+                            16
+                        }
+                        totalSamples = sampleRate.toLong() * duration
+                        perSamplePoints = totalSamples / expectedPoints
+                    }
+
+                    override fun onError(codec: MediaCodec, e: MediaCodec.CodecException) {
+                        result.error(
+                            Constants.LOG_TAG,
+                            e.message,
+                            "An error is thrown while decoding the audio file"
+                        )
+                        finishCount.countDown()
+                    }
+
+                    override fun onOutputBufferAvailable(
+                        codec: MediaCodec,
+                        index: Int,
+                        info: MediaCodec.BufferInfo
+                    ) {
+                        if (info.size > 0) {
+                            codec.getOutputBuffer(index)?.let { buf ->
+                                val size = info.size
+                                buf.position(info.offset)
+                                when (pcmEncodingBit) {
+                                    8 -> handle8bit(size, buf)
+                                    16 -> handle16bit(size, buf)
+                                    32 -> handle32bit(size, buf)
+                                }
+                                codec.releaseOutputBuffer(index, false)
+                            }
+                        }
+
+                        if (info.isEof()) {
+                            stop()
+                        }
+                    }
+                })
+                it.start()
+                // Without this flag, stop() and cancel() would be no-ops and
+                // the codec would never be released.
+                started = true
+            }
+        } catch (e: Exception) {
+            result.error(
+                Constants.LOG_TAG,
+                e.message,
+                "An error is thrown before decoding the audio file"
+            )
+        }
+    }
+
+    var sampleData = ArrayList<Float>()
+    private var sampleCount = 0L
+    private var sampleSum = 0.0
+
+    private fun rms(value: Float) {
+        if (sampleCount == perSamplePoints) {
+            currentProgress++
+            progress = currentProgress / expectedPoints
+            val rms = sqrt(sampleSum / perSamplePoints)
+            sampleData.add(rms.toFloat())
+            extractorCallBack.onProgress(progress)
+            sampleCount = 0
+            sampleSum = 0.0
+
+            val args: MutableMap<String, Any?> = HashMap()
+            args[Constants.waveformData] = sampleData
+            args[Constants.progress] = progress
+            args[Constants.playerKey] = key
+            methodChannel.invokeMethod(
+                Constants.onCurrentExtractedWaveformData,
+                args
+            )
+        }
+
+        sampleCount++
+        sampleSum += value.toDouble().pow(2.0)
+    }
+
+    private fun handle8bit(size: Int, buf: ByteBuffer) {
+        repeat(size / if (channels == 2) 2 else 1) {
+            // ENCODING_PCM_8BIT is unsigned, so re-centre it around zero
+            // before normalising.
+            val value = ((buf.get().toInt() and 0xFF) - 128) / 128f
+            if (channels == 2) {
+                // Skip the second channel.
+                buf.get()
+            }
+            rms(value)
+        }
+    }
+
+    private fun handle16bit(size: Int, buf: ByteBuffer) {
+        repeat(size / if (channels == 2) 4 else 2) {
+            // Mask the low byte before combining; otherwise sign extension
+            // corrupts the reassembled little-endian sample.
+            val first = buf.get().toInt() and 0xFF
+            val second = buf.get().toInt() shl 8
+            val value = (first or second).toShort() / 32767f
+            if (channels == 2) {
+                buf.get()
+                buf.get()
+            }
+            rms(value)
+        }
+    }
+
+    private fun handle32bit(size: Int, buf: ByteBuffer) {
+        repeat(size / if (channels == 2) 8 else 4) {
+            // Mask all but the most significant byte to avoid sign extension.
+            val first = buf.get().toLong() and 0xFF
+            val second = (buf.get().toLong() and 0xFF) shl 8
+            val third = (buf.get().toLong() and 0xFF) shl 16
+            val fourth = buf.get().toLong() shl 24
+            val value = (first or second or third or fourth) / 2147483648f
+            if (channels == 2) {
+                buf.get()
+                buf.get()
+                buf.get()
+                buf.get()
+            }
+            rms(value)
+        }
+    }
+
+    fun stop() {
+        if (!started) return
+        started = false
+        decoder?.stop()
+        decoder?.release()
+        extractor?.release()
+        finishCount.countDown()
+    }
+
+    fun cancel() {
+        if (!started) return
+        handler.post { stop() }
+        finishCount.await()
+    }
+}
+
+fun MediaCodec.BufferInfo.isEof() = flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0
+
+interface ExtractorCallBack {
+    fun onProgress(value: Float)
+}
\ No newline at end of file
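The extractor's core idea is bucketed RMS: every `perSamplePoints` decoded PCM samples collapse into a single waveform bar. A standalone Dart sketch of the same math (not part of the plugin; input is assumed to be normalized -1..1 samples):

```dart
import 'dart:math' as math;

/// Collapses [samples] into at most [expectedPoints] RMS values, mirroring
/// WaveformExtractor.rms(): square, accumulate, average, square-root.
List<double> rmsDownsample(List<double> samples, int expectedPoints) {
  final perSamplePoints = samples.length ~/ expectedPoints;
  if (perSamplePoints == 0) return List.of(samples);
  final bars = <double>[];
  var sum = 0.0;
  var count = 0;
  for (final sample in samples) {
    sum += sample * sample;
    count++;
    if (count == perSamplePoints) {
      bars.add(math.sqrt(sum / perSamplePoints));
      sum = 0.0;
      count = 0;
    }
  }
  return bars;
}
```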
applicationId "com.simform.audio_waveforms_example" minSdkVersion 21 - targetSdkVersion 31 + targetSdkVersion 33 versionCode flutterVersionCode.toInteger() versionName flutterVersionName } diff --git a/example/lib/main.dart b/example/lib/main.dart index 7caae3e2..eec46f27 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -98,32 +98,32 @@ class _HomeState extends State with WidgetsBindingObserver { final file1 = File('${appDirectory.path}/audio1.mp3'); await file1.writeAsBytes( (await _loadAsset('assets/audios/audio1.mp3')).buffer.asUint8List()); - playerController1.preparePlayer(file1.path); + playerController1.preparePlayer(path: file1.path); ///audio-2 final file2 = File('${appDirectory.path}/audio2.mp3'); await file2.writeAsBytes( (await _loadAsset('assets/audios/audio2.mp3')).buffer.asUint8List()); - playerController2.preparePlayer(file2.path); + playerController2.preparePlayer(path: file2.path); ///audio-3 final file3 = File('${appDirectory.path}/audio3.mp3'); await file3.writeAsBytes( (await _loadAsset('assets/audios/audio3.mp3')).buffer.asUint8List()); - playerController3.preparePlayer(file3.path); + playerController3.preparePlayer(path: file3.path); ///audio-4 final file4 = File('${appDirectory.path}/audio4.mp3'); await file4.writeAsBytes( (await _loadAsset('assets/audios/audio4.mp3')).buffer.asUint8List()); - playerController4.preparePlayer(file4.path); + playerController4.preparePlayer(path: file4.path); } void _pickFile() async { FilePickerResult? result = await FilePicker.platform.pickFiles(); if (result != null) { musicFile = result.files.single.path; - await playerController6.preparePlayer(musicFile!); + await playerController6.preparePlayer(path: musicFile!); } else { debugPrint("File not picked"); } @@ -237,7 +237,8 @@ class _HomeState extends State with WidgetsBindingObserver { child: isRecording ? AudioWaveforms( enableGesture: true, - size: Size(MediaQuery.of(context).size.width / 2, 50), + size: + Size(MediaQuery.of(context).size.width / 2, 50), recorderController: recorderController, waveStyle: const WaveStyle( waveColor: Colors.white, @@ -264,7 +265,8 @@ class _HomeState extends State with WidgetsBindingObserver { readOnly: true, decoration: InputDecoration( hintText: "Type Something...", - hintStyle: const TextStyle(color: Colors.white54), + hintStyle: + const TextStyle(color: Colors.white54), contentPadding: const EdgeInsets.only(top: 16), border: InputBorder.none, suffixIcon: IconButton( @@ -312,7 +314,7 @@ class _HomeState extends State with WidgetsBindingObserver { if (path != null) { debugPrint("Recorded file size: ${File(path).lengthSync()}"); - await playerController5.preparePlayer(path); + await playerController5.preparePlayer(path: path); } } else { await recorderController.record(path); diff --git a/ios/Classes/AudioPlayer.swift b/ios/Classes/AudioPlayer.swift index 9a3e0a29..f884b938 100644 --- a/ios/Classes/AudioPlayer.swift +++ b/ios/Classes/AudioPlayer.swift @@ -2,40 +2,63 @@ import Foundation import AVKit -class AudioPlayer : NSObject, AVAudioPlayerDelegate { +class AudioPlayer: NSObject, AVAudioPlayerDelegate { private var seekToStart = true private var stopWhenCompleted = false private var timer: Timer? private var player: AVAudioPlayer? 
diff --git a/ios/Classes/AudioPlayer.swift b/ios/Classes/AudioPlayer.swift
index 9a3e0a29..f884b938 100644
--- a/ios/Classes/AudioPlayer.swift
+++ b/ios/Classes/AudioPlayer.swift
@@ -2,40 +2,63 @@
 import Foundation
 import AVKit
 
-class AudioPlayer : NSObject, AVAudioPlayerDelegate {
+class AudioPlayer: NSObject, AVAudioPlayerDelegate {
     private var seekToStart = true
     private var stopWhenCompleted = false
     private var timer: Timer?
     private var player: AVAudioPlayer?
     private var finishMode: FinishMode = FinishMode.stop
-    var plugin : SwiftAudioWaveformsPlugin
-    var playerKey :String
-    init(plugin : SwiftAudioWaveformsPlugin,playerKey : String){
+    var plugin: SwiftAudioWaveformsPlugin
+    var playerKey: String
+    var flutterChannel: FlutterMethodChannel
+    private var waveformExtractor: WaveformExtractor?
+
+    init(plugin: SwiftAudioWaveformsPlugin, playerKey: String, channel: FlutterMethodChannel) {
         self.plugin = plugin
         self.playerKey = playerKey
+        flutterChannel = channel
     }
-
-
-    func preparePlayer(path: String?,volume: Double?,result: @escaping FlutterResult){
-        if(!(path ?? "").isEmpty){
+
+    func extractWaveformData(path: String?, result: @escaping FlutterResult, noOfSamples: Int?) {
+        if(!(path ?? "").isEmpty) {
+            do {
+                let audioUrl = URL.init(fileURLWithPath: path!)
+                waveformExtractor = try WaveformExtractor(url: audioUrl, flutterResult: result, channel: flutterChannel)
+                if(waveformExtractor != nil) {
+                    let data = waveformExtractor!.extractWaveform(samplesPerPixel: noOfSamples, playerKey: playerKey)
+                    waveformExtractor!.cancel()
+                    if(waveformExtractor!.progress == 1.0) {
+                        let waveformData = waveformExtractor!.getChannelMean(data: data!)
+                        result(waveformData)
+                    }
+                }
+            } catch {
+                result(FlutterError(code: Constants.audioWaveforms, message: "Failed to decode audio file", details: nil))
+            }
+        } else {
+            result(FlutterError(code: Constants.audioWaveforms, message: "Audio file path can't be empty or null", details: nil))
+        }
+    }
+
+    func preparePlayer(path: String?, volume: Double?, result: @escaping FlutterResult) {
+        if(!(path ?? "").isEmpty) {
             let audioUrl = URL.init(fileURLWithPath: path!)
             do {
                 player = try AVAudioPlayer(contentsOf: audioUrl)
             } catch {
                 result(FlutterError(code: "", message: "Failed to prepare recording", details: nil))
             }
-
             player?.prepareToPlay()
             player?.volume = Float(volume ?? 1.0)
             result(true)
-        }else {
+        } else {
             result(FlutterError(code: Constants.audioWaveforms, message: "Audio file path can't be empty or null", details: nil))
         }
-
     }
-
+
     func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer,
-                                     successfully flag: Bool){
+                                     successfully flag: Bool) {
         var finishType = 2
         switch self.finishMode {
         case .loop:
@@ -52,14 +75,14 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate {
             self.player = nil
             finishType = 2
         }
-        plugin.flutterChannel.invokeMethod(Constants.onDidFinishPlayingAudio, arguments: [Constants.finishType : finishType, Constants.playerKey: playerKey])
-
+        plugin.flutterChannel.invokeMethod(Constants.onDidFinishPlayingAudio, arguments: [Constants.finishType: finishType, Constants.playerKey: playerKey])
     }
-
-    func startPlyer(result: @escaping FlutterResult,finishMode: Int?){
-        if(finishMode != nil && finishMode == 0){
+
+    func startPlyer(result: @escaping FlutterResult, finishMode: Int?) {
+        if(finishMode != nil && finishMode == 0) {
             self.finishMode = FinishMode.loop
-        } else if(finishMode != nil && finishMode == 1){
+        } else if(finishMode != nil && finishMode == 1) {
             self.finishMode = FinishMode.pause
         } else {
             self.finishMode = FinishMode.stop
@@ -69,54 +92,59 @@ class AudioPlayer: NSObject, AVAudioPlayerDelegate {
         startListening()
         result(true)
     }
-
-    func pausePlayer(result: @escaping FlutterResult){
+
+    func pausePlayer(result: @escaping FlutterResult) {
         stopListening()
         player?.pause()
         result(true)
     }
-
-    func stopPlayer(result: @escaping FlutterResult){
+
+    func stopPlayer(result: @escaping FlutterResult) {
         stopListening()
         player?.stop()
         player = nil
         timer = nil
         result(true)
     }
-
-
-    func getDuration(_ type:DurationType,_ result: @escaping FlutterResult) throws {
+
+    func getDuration(_ type: DurationType, _ result: @escaping FlutterResult) throws {
         if type == .Current {
             let ms = (player?.currentTime ?? 0) * 1000
             result(Int(ms))
-        }else{
+        } else {
             let ms = (player?.duration ?? 0) * 1000
             result(Int(ms))
         }
     }
-
-    func setVolume(_ volume: Double?,_ result : @escaping FlutterResult) {
+
+    func setVolume(_ volume: Double?, _ result: @escaping FlutterResult) {
         player?.volume = Float(volume ?? 1.0)
         result(true)
     }
-
-    func seekTo(_ time: Int?,_ result : @escaping FlutterResult) {
-        player?.currentTime = Double(time!/1000)
-        result(true)
+
+    func seekTo(_ time: Int?, _ result: @escaping FlutterResult) {
+        if(time != nil) {
+            // Convert before dividing so sub-second positions aren't truncated
+            // by integer division.
+            player?.currentTime = Double(time!) / 1000.0
+            result(true)
+        } else {
+            result(false)
+        }
     }
-
-    func startListening(){
+
+    func startListening() {
         if #available(iOS 10.0, *) {
-            timer = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: true, block: {_ in
+            timer = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: true, block: { _ in
                 let ms = (self.player?.currentTime ?? 0) * 1000
-                self.plugin.onCurrentDuration(duration: Int(ms),playerKey: self.playerKey)
+                self.flutterChannel.invokeMethod(Constants.onCurrentDuration, arguments: [Constants.current: Int(ms), Constants.playerKey: self.playerKey])
             })
         } else {
             // Fallback on earlier versions
         }
     }
-
-    func stopListening(){
+
+    func stopListening() {
         timer?.invalidate()
         timer = nil
     }
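The iOS player now pushes `onCurrentDuration` events straight through the shared `FlutterMethodChannel` instead of going via the plugin class. On the Dart side these arrive as a per-player stream, consumed roughly like this (using the `PlayerController` stream added later in this diff):

```dart
import 'dart:async';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/foundation.dart';

/// Logs playback-position updates, which iOS now emits by invoking
/// onCurrentDuration directly on the shared method channel.
StreamSubscription<int> watchPosition(PlayerController controller) {
  return controller.onCurrentDurationChanged.listen((ms) {
    debugPrint('current position: ${ms}ms');
  });
}
```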
diff --git a/ios/Classes/SwiftAudioWaveformsPlugin.swift b/ios/Classes/SwiftAudioWaveformsPlugin.swift
index 0b4eae9d..e9882d53 100644
--- a/ios/Classes/SwiftAudioWaveformsPlugin.swift
+++ b/ios/Classes/SwiftAudioWaveformsPlugin.swift
@@ -109,6 +109,15 @@ public class SwiftAudioWaveformsPlugin: NSObject, FlutterPlugin {
                 audioPlayers[playerKey] = nil
             }
             result(true)
+        case Constants.extractWaveformData:
+            let key = args?[Constants.playerKey] as? String
+            let path = args?[Constants.path] as? String
+            let noOfSamples = args?[Constants.noOfSamples] as? Int
+            if(key != nil) {
+                audioPlayers[key!]?.extractWaveformData(path: path, result: result, noOfSamples: noOfSamples)
+            } else {
+                result(FlutterError(code: Constants.audioWaveforms, message: "Cannot get waveform data", details: "Player key is null"))
+            }
         default:
             result(FlutterMethodNotImplemented)
             break
@@ -117,12 +126,8 @@ public class SwiftAudioWaveformsPlugin: NSObject, FlutterPlugin {
 
     func initPlayer(playerKey: String) {
         if audioPlayers[playerKey] == nil {
-            let newPlayer = AudioPlayer(plugin: self,playerKey: playerKey)
+            let newPlayer = AudioPlayer(plugin: self, playerKey: playerKey, channel: flutterChannel)
             audioPlayers[playerKey] = newPlayer
         }
     }
-
-    func onCurrentDuration(duration: Int, playerKey: String){
-        flutterChannel.invokeMethod(Constants.onCurrentDuration, arguments: [Constants.current : duration, Constants.playerKey : playerKey])
-    }
 }
diff --git a/ios/Classes/Utils.swift b/ios/Classes/Utils.swift
index 97d12ab2..e35363e9 100644
--- a/ios/Classes/Utils.swift
+++ b/ios/Classes/Utils.swift
@@ -50,6 +50,11 @@ struct Constants {
     static let onDidFinishPlayingAudio = "onDidFinishPlayingAudio"
     static let finishMode = "finishMode"
     static let finishType = "finishType"
+    static let extractWaveformData = "extractWaveformData"
+    static let noOfSamples = "noOfSamples"
+    static let onCurrentExtractedWaveformData = "onCurrentExtractedWaveformData"
+    static let waveformData = "waveformData"
+    static let onExtractionProgressUpdate = "onExtractionProgressUpdate"
 }
 
 enum FinishMode : Int{
@@ -57,3 +62,13 @@ enum FinishMode : Int{
     case pause = 1
     case stop = 2
 }
+
+/// A 2D array of floats, one inner array per audio channel
+public typealias FloatChannelData = [[Float]]
+
+/// Extension to fill an array with zeros
+public extension RangeReplaceableCollection where Iterator.Element: ExpressibleByIntegerLiteral {
+    init(zeros count: Int) {
+        self.init(repeating: 0, count: count)
+    }
+}
diff --git a/ios/Classes/WaveformExtractor.swift b/ios/Classes/WaveformExtractor.swift
new file mode 100644
index 00000000..ff96e5ce
--- /dev/null
+++ b/ios/Classes/WaveformExtractor.swift
@@ -0,0 +1,155 @@
+import Accelerate
+import AVFoundation
+
+public class WaveformExtractor {
+
+    public private(set) var audioFile: AVAudioFile?
+    private var result: FlutterResult
+    var flutterChannel: FlutterMethodChannel
+    private var waveformData = Array<Float>()
+    var progress: Float = 0.0
+    var channelCount: Int = 1
+    private var currentProgress: Float = 0.0
+    private let abortWaveformDataQueue = DispatchQueue(label: "WaveformExtractor",
+                                                       attributes: .concurrent)
+
+    private var _abortGetWaveformData: Bool = false
+
+    public var abortGetWaveformData: Bool {
+        get { _abortGetWaveformData }
+        set {
+            abortWaveformDataQueue.async(flags: .barrier) {
+                self._abortGetWaveformData = newValue
+            }
+        }
+    }
+
+    public init(url: URL, flutterResult: @escaping FlutterResult, channel: FlutterMethodChannel) throws {
+        audioFile = try AVAudioFile(forReading: url)
+        result = flutterResult
+        self.flutterChannel = channel
+    }
+
+    deinit {
+        audioFile = nil
+    }
+
+    public func extractWaveform(samplesPerPixel: Int?,
+                                offset: Int? = 0,
+                                length: UInt? = nil, playerKey: String) -> FloatChannelData?
+    {
+        guard let audioFile = audioFile else { return nil }
+
+        /// Prevent division by zero and enforce a minimum resolution
+        let samplesPerPixel = max(1, samplesPerPixel ?? 100)
+
+        let currentFrame = audioFile.framePosition
+
+        let totalFrameCount = AVAudioFrameCount(audioFile.length)
+        var framesPerBuffer: AVAudioFrameCount = totalFrameCount / AVAudioFrameCount(samplesPerPixel)
+
+        guard let rmsBuffer = AVAudioPCMBuffer(pcmFormat: audioFile.processingFormat,
+                                               frameCapacity: AVAudioFrameCount(framesPerBuffer)) else { return nil }
+
+        channelCount = Int(audioFile.processingFormat.channelCount)
+        var data = Array(repeating: [Float](zeros: samplesPerPixel), count: channelCount)
+
+        var start: Int
+        if let offset = offset, offset >= 0 {
+            start = offset
+        } else {
+            start = Int(currentFrame / Int64(framesPerBuffer))
+            if let offset = offset, offset < 0 {
+                start += offset
+            }
+
+            if start < 0 {
+                start = 0
+            }
+        }
+        var startFrame: AVAudioFramePosition = offset == nil ? currentFrame : Int64(start * Int(framesPerBuffer))
+
+        var end = samplesPerPixel
+        if let length = length {
+            end = start + Int(length)
+        }
+
+        if end > samplesPerPixel {
+            end = samplesPerPixel
+        }
+        if start > end {
+            result(FlutterError(code: Constants.audioWaveforms, message: "Offset is larger than the total length.", details: "Please select a smaller number of samples"))
+            return nil
+        }
+
+        for i in start ..< end {
+
+            if abortGetWaveformData {
+                audioFile.framePosition = currentFrame
+                abortGetWaveformData = false
+                return nil
+            }
+
+            do {
+                audioFile.framePosition = startFrame
+                /// Read a portion of the buffer
+                try audioFile.read(into: rmsBuffer, frameCount: framesPerBuffer)
+            } catch let err as NSError {
+                result(FlutterError(code: Constants.audioWaveforms, message: "Couldn't read into buffer. \(err)", details: nil))
+                return nil
+            }
+
+            guard let floatData = rmsBuffer.floatChannelData else { return nil }
+            /// Calculate RMS (root mean square) per channel
+            for channel in 0 ..< channelCount {
+                var rms: Float = 0.0
+                vDSP_rmsqv(floatData[channel], 1, &rms, vDSP_Length(rmsBuffer.frameLength))
+                data[channel][i] = rms
+            }
+
+            /// Update progress
+            currentProgress += 1
+            progress = currentProgress / Float(samplesPerPixel)
+
+            /// Send to the Flutter channel
+            flutterChannel.invokeMethod(Constants.onCurrentExtractedWaveformData, arguments: [
+                Constants.waveformData: getChannelMean(data: data) as Any,
+                Constants.progress: progress,
+                Constants.playerKey: playerKey
+            ])
+
+            startFrame += AVAudioFramePosition(framesPerBuffer)
+
+            if startFrame + AVAudioFramePosition(framesPerBuffer) > totalFrameCount {
+                framesPerBuffer = totalFrameCount - AVAudioFrameCount(startFrame)
+                if framesPerBuffer <= 0 { break }
+            }
+        }
+
+        audioFile.framePosition = currentFrame
+
+        return data
+    }
+
+    func getChannelMean(data: FloatChannelData) -> [Float] {
+        waveformData.removeAll()
+        if(channelCount == 2 && data[0].isEmpty == false && data[1].isEmpty == false) {
+            for (ele1, ele2) in zip(data[0], data[1]) {
+                waveformData.append((ele1 + ele2) / 2)
+            }
+        } else if(data[0].isEmpty == false) {
+            waveformData = data[0]
+        } else if(data.count > 1 && data[1].isEmpty == false) {
+            waveformData = data[1]
+        } else {
+            result(FlutterError(code: Constants.audioWaveforms, message: "Cannot get waveform mean", details: "Both audio channels are null"))
+        }
+        return waveformData
+    }
+
+    public func cancel() {
+        abortGetWaveformData = true
+    }
+}
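`getChannelMean` folds the per-channel RMS tracks into the single list Flutter receives by averaging the two channels pointwise. The same reduction in Dart, for illustration only:

```dart
import 'dart:math' as math;

/// Averages per-channel waveform data into one mono list, mirroring the
/// Swift getChannelMean: pairwise mean for stereo, pass-through for mono.
List<double> channelMean(List<List<double>> channels) {
  if (channels.length >= 2 &&
      channels[0].isNotEmpty &&
      channels[1].isNotEmpty) {
    final length = math.min(channels[0].length, channels[1].length);
    return List<double>.generate(
      length,
      (i) => (channels[0][i] + channels[1][i]) / 2,
    );
  }
  return channels.isEmpty ? <double>[] : List.of(channels.first);
}
```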
diff --git a/lib/src/base/audio_waveforms_interface.dart b/lib/src/base/audio_waveforms_interface.dart
index 4d54bc4d..98cdc233 100644
--- a/lib/src/base/audio_waveforms_interface.dart
+++ b/lib/src/base/audio_waveforms_interface.dart
@@ -143,12 +143,26 @@ class AudioWaveformsInterface {
     return result ?? false;
   }
 
+  Future<List<double>> extractWaveformData({
+    required String key,
+    required String path,
+    required int noOfSamples,
+  }) async {
+    final result =
+        await _methodChannel.invokeMethod(Constants.extractWaveformData, {
+      Constants.playerKey: key,
+      Constants.path: path,
+      Constants.noOfSamples: noOfSamples,
+    });
+    return List<double>.from(result ?? []);
+  }
+
   Future<bool> stopAllPlayers() async {
     var result = await _methodChannel.invokeMethod(Constants.stopAllPlayers);
     return result ?? false;
   }
 
-  void setMethodCallHandler() async {
+  Future<void> setMethodCallHandler() async {
     _methodChannel.setMethodCallHandler((call) async {
       switch (call.method) {
         case Constants.onCurrentDuration:
@@ -174,6 +188,18 @@ class AudioWaveformsInterface {
                 ?._playerState = playerState;
           }
           break;
+        case Constants.onCurrentExtractedWaveformData:
+          var key = call.arguments[Constants.playerKey];
+          var progress = call.arguments[Constants.progress];
+          var waveformData =
+              List<double>.from(call.arguments[Constants.waveformData]);
+          PlatformStreams.instance.addExtractedWaveformDataEvent(
+            PlayerIdentifier<List<double>>(key, waveformData),
+          );
+          PlatformStreams.instance.addExtractionProgress(
+            PlayerIdentifier<double>(key, progress),
+          );
+          break;
       }
     });
   }
diff --git a/lib/src/base/constants.dart b/lib/src/base/constants.dart
index 3efe7899..a2bfbb42 100644
--- a/lib/src/base/constants.dart
+++ b/lib/src/base/constants.dart
@@ -37,6 +37,11 @@ class Constants {
   static const String onDidFinishPlayingAudio = "onDidFinishPlayingAudio";
   static const String finishtype = "finishType";
   static const String finishMode = "finishMode";
+  static const String extractWaveformData = "extractWaveformData";
+  static const String noOfSamples = "noOfSamples";
+  static const String waveformData = "waveformData";
+  static const String onCurrentExtractedWaveformData =
+      "onCurrentExtractedWaveformData";
   static const int byteSize = 8;
 }
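On the Dart side these platform callbacks surface as broadcast streams filtered per player key. Typical consumption through `PlayerController` (using the streams added later in this diff):

```dart
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/foundation.dart';

/// Watches live extraction output for one controller: the growing
/// waveform list plus a 0.0-1.0 progress fraction.
void watchExtraction(PlayerController controller) {
  controller.onCurrentExtractedWaveformData.listen((List<double> wave) {
    debugPrint('bars so far: ${wave.length}');
  });
  controller.onExtractionProgress.listen((double progress) {
    debugPrint('extraction: ${(progress * 100).toStringAsFixed(0)}%');
  });
}
```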
diff --git a/lib/src/base/platform_streams.dart b/lib/src/base/platform_streams.dart
index 5eb1b9e3..d2116883 100644
--- a/lib/src/base/platform_streams.dart
+++ b/lib/src/base/platform_streams.dart
@@ -17,12 +17,16 @@ class PlatformStreams {
 
   bool isInitialised = false;
 
-  void init() {
+  Future<void> init() async {
     _currentDurationController =
         StreamController<PlayerIdentifier<int>>.broadcast();
     _playerStateController =
         StreamController<PlayerIdentifier<PlayerState>>.broadcast();
-    AudioWaveformsInterface.instance.setMethodCallHandler();
+    _extractedWaveformDataController =
+        StreamController<PlayerIdentifier<List<double>>>.broadcast();
+    _extractionProgressController =
+        StreamController<PlayerIdentifier<double>>.broadcast();
+    await AudioWaveformsInterface.instance.setMethodCallHandler();
     isInitialised = true;
   }
 
@@ -32,8 +36,17 @@ class PlatformStreams {
   Stream<PlayerIdentifier<PlayerState>> get onPlayerStateChanged =>
       _playerStateController.stream;
 
+  Stream<PlayerIdentifier<List<double>>> get onCurrentExtractedWaveformData =>
+      _extractedWaveformDataController.stream;
+
+  Stream<PlayerIdentifier<double>> get onExtractionProgress =>
+      _extractionProgressController.stream;
+
   late StreamController<PlayerIdentifier<int>> _currentDurationController;
   late StreamController<PlayerIdentifier<PlayerState>> _playerStateController;
+  late StreamController<PlayerIdentifier<List<double>>>
+      _extractedWaveformDataController;
+  late StreamController<PlayerIdentifier<double>> _extractionProgressController;
 
   void addCurrentDurationEvent(PlayerIdentifier<int> playerIdentifier) {
     if (!_currentDurationController.isClosed) {
@@ -47,9 +60,24 @@ class PlatformStreams {
     }
   }
 
-  void dispose() async {
-    await _currentDurationController.close();
-    await _playerStateController.close();
+  void addExtractedWaveformDataEvent(
+      PlayerIdentifier<List<double>> playerIdentifier) {
+    if (!_extractedWaveformDataController.isClosed) {
+      _extractedWaveformDataController.add(playerIdentifier);
+    }
+  }
+
+  void addExtractionProgress(PlayerIdentifier<double> progress) {
+    if (!_extractionProgressController.isClosed) {
+      _extractionProgressController.add(progress);
+    }
+  }
+
+  void dispose() {
+    _currentDurationController.close();
+    _playerStateController.close();
+    _extractedWaveformDataController.close();
+    _extractionProgressController.close();
     AudioWaveformsInterface.instance.removeMethodCallHandeler();
     isInitialised = false;
   }
diff --git a/lib/src/base/utils.dart b/lib/src/base/utils.dart
index 0573f16b..451a778a 100644
--- a/lib/src/base/utils.dart
+++ b/lib/src/base/utils.dart
@@ -92,9 +92,6 @@ enum IosEncoder {
 
 /// States of audio player
 enum PlayerState {
-  /// When reading of an audio file is completed
-  readingComplete,
-
   /// When player is [initialised]
   initialized,
 
@@ -144,3 +141,20 @@ enum FinishMode {
 
 // TODO: remove this function if we remove support for flutter 2.x
 T? ambiguate<T>(T? object) => object;
+
+/// An enum to decide which type of gestures will be used.
+enum SeekGestureType {
+  /// Allows seeking by dragging as well as tapping.
+  /// Most useful when there is a limited number of waves and they
+  /// don't exceed the screen width.
+  seekAndTap,
+
+  /// Allows seeking only by tapping; waveforms can instead be dragged
+  /// along the X-axis to move through the timeline.
+  /// Most useful when there is a greater number of waves and they
+  /// exceed the screen width.
+  scrollAndTap,
+
+  /// Seek gestures are disabled.
+  none
+}
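The new `SeekGestureType` enum feeds the waveform widget's gesture handling. A hedged usage sketch: the widget-side wiring is not part of this diff, so the `seekGestureType:` parameter name below is an assumption.

```dart
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/material.dart';

Widget buildWaveform(PlayerController controller) {
  return AudioFileWaveforms(
    size: const Size(300, 50),
    playerController: controller,
    // Assumed parameter name; the widget change is outside this diff.
    seekGestureType: SeekGestureType.scrollAndTap,
  );
}
```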
diff --git a/lib/src/controllers/player_controller.dart b/lib/src/controllers/player_controller.dart
index 5621b94c..c1ee3727 100644
--- a/lib/src/controllers/player_controller.dart
+++ b/lib/src/controllers/player_controller.dart
@@ -1,9 +1,6 @@
 import 'dart:async';
 import 'dart:io';
 
-// TODO: Remove when fully migrated to flutter 3.3
-import 'dart:typed_data'; //ignore: unnecessary_import
-
 import 'package:audio_waveforms/audio_waveforms.dart';
 import 'package:audio_waveforms/src/base/constants.dart';
 import 'package:audio_waveforms/src/base/platform_streams.dart';
@@ -19,19 +16,25 @@ class PlayerController extends ChangeNotifier {
   /// Provides data we got after reading audio file.
   Uint8List? get bufferData => _bufferData;
 
+  final List<double> _waveformData = [];
+
+  List<double> get waveformData => _waveformData;
+
   PlayerState _playerState = PlayerState.stopped;
 
   /// Provides current state of the player
   PlayerState get playerState => _playerState;
 
-  void setPlayerState(PlayerState state) {
+  bool _shouldRefresh = true;
+
+  bool get shouldRefresh => _shouldRefresh;
+
+  void _setPlayerState(PlayerState state) {
     _playerState = state;
     PlatformStreams.instance
         .addPlayerStateEvent(PlayerIdentifier(playerKey, state));
   }
 
-  String? _audioFilePath;
-
   int _maxDuration = -1;
 
   /// Provides [max] duration of currently provided audio file.
@@ -42,6 +45,10 @@ class PlayerController extends ChangeNotifier {
   /// A unique key string associated with [this] player only
   String get playerKey => _playerKey.toString();
 
+  final bool _shouldClearLabels = false;
+
+  bool get shouldClearLabels => _shouldClearLabels;
+
   /// A stream to get current state of the player. This stream
   /// will emit an event whenever there is a change in the playerState.
   Stream<PlayerState> get onPlayerStateChanged =>
@@ -52,6 +59,12 @@ class PlayerController extends ChangeNotifier {
   Stream<int> get onCurrentDurationChanged =>
       PlatformStreams.instance.onDurationChanged.filter(playerKey);
 
+  Stream<List<double>> get onCurrentExtractedWaveformData =>
+      PlatformStreams.instance.onCurrentExtractedWaveformData.filter(playerKey);
+
+  Stream<double> get onExtractionProgress =>
+      PlatformStreams.instance.onExtractionProgress.filter(playerKey);
+
   PlayerController() {
     if (!PlatformStreams.instance.isInitialised) {
       PlatformStreams.instance.init();
@@ -59,24 +72,6 @@ class PlayerController extends ChangeNotifier {
     }
     PlatformStreams.instance.playerControllerFactory.addAll({playerKey: this});
   }
 
-  /// Reads bytes from audio file
-  Future<void> _readAudioFile(String path) async {
-    _audioFilePath = path;
-    File file = File(path);
-    if (await file.exists()) {
-      var bytes = await file.readAsBytes();
-      _bufferData = bytes;
-      if (_bufferData != null) {
-        setPlayerState(PlayerState.readingComplete);
-      } else {
-        throw "Can't read given audio file";
-      }
-      notifyListeners();
-    } else {
-      throw "Please provide a valid file path";
-    }
-  }
-
   /// Calls platform to prepare player.
   ///
   /// Path is required parameter for providing location of the
   /// audio file.
@@ -86,25 +81,79 @@ class PlayerController extends ChangeNotifier {
   /// as mute and 1.0 as max volume. Providing value greater 1.0 is also
   /// treated same as 1.0 (max volume).
   ///
-  /// This function first reads bytes from audio file so as soon as
-  /// it completes, it prepares audio player.
+  /// Waveforms will also be extracted when calling this function; the
+  /// extracted data can be accessed using [waveformData]. Passing false
+  /// for [shouldExtractWaveform] will skip waveform extraction.
+  ///
+  /// Waveforms can also be extracted using the [extractWaveformData]
+  /// function, whose result can be stored locally or on a server. This data
+  /// can be passed directly to the AudioFileWaveforms widget, which saves
+  /// resources by not extracting waveforms for the same file every time.
+  ///
+  /// [noOfSamples] indicates the number of extracted data points. This will
+  /// determine the number of bars in the waveform.
+  ///
+  /// Defaults to 100.
-  Future<void> preparePlayer(String path, [double? volume]) async {
+  Future<void> preparePlayer({
+    required String path,
+    double? volume,
+    bool shouldExtractWaveform = true,
+    int noOfSamples = 100,
+  }) async {
     path = Uri.parse(path).path;
+    final isPrepared = await AudioWaveformsInterface.instance
+        .preparePlayer(path, playerKey, volume);
+    if (isPrepared) {
+      _maxDuration = await getDuration();
+      _setPlayerState(PlayerState.initialized);
+    }
 
-    await _readAudioFile(path);
-    if ((_playerState == PlayerState.readingComplete &&
-        _audioFilePath != null)) {
-      final isPrepared = await AudioWaveformsInterface.instance
-          .preparePlayer(path, _playerKey.toString(), volume);
-      if (isPrepared) {
-        _maxDuration = await getDuration();
-        setPlayerState(PlayerState.initialized);
-      }
-      notifyListeners();
-    } else {
-      throw "Can not prepare player without reading audio file";
+    if (shouldExtractWaveform) {
+      extractWaveformData(
+        path: path,
+        noOfSamples: noOfSamples,
+      ).then(
+        (value) {
+          waveformData
+            ..clear()
+            ..addAll(value);
+          notifyListeners();
+        },
+      );
     }
+    notifyListeners();
+  }
+
+  /// Extracts waveform data from the provided audio file path.
+  ///
+  /// [noOfSamples] indicates the number of extracted data points. This will
+  /// determine the number of bars in the waveform.
+  ///
+  /// This function decodes the whole audio file and calculates RMS values
+  /// according to the provided number of samples, so it may take a while
+  /// to finish, especially on Android.
+  ///
+  /// For example, a 58-minute audio file of about 18 MB took about
+  /// 4 minutes to decode on Android, while the same file took about
+  /// 6-7 seconds on iOS.
+  ///
+  /// Providing a smaller number of samples doesn't reduce this time,
+  /// because the whole file still has to be decoded.
+  ///
+  /// [noOfSamples] defaults to 100.
+  Future<List<double>> extractWaveformData({
+    required String path,
+    int noOfSamples = 100,
+  }) async {
+    path = Uri.parse(path).path;
+    final result = await AudioWaveformsInterface.instance.extractWaveformData(
+      key: playerKey,
+      path: path,
+      noOfSamples: noOfSamples,
+    );
+    notifyListeners();
+
+    return result;
+  }
 
   /// A function to start the player to play/resume the audio.
@@ -114,26 +163,30 @@ class PlayerController extends ChangeNotifier {
   ///
   /// See also:
   /// * [FinishMode]
-  Future<void> startPlayer({FinishMode finishMode = FinishMode.stop}) async {
+  Future<void> startPlayer({
+    FinishMode finishMode = FinishMode.stop,
+    bool forceRefresh = true,
+  }) async {
     if (_playerState == PlayerState.initialized ||
         _playerState == PlayerState.paused) {
       final isStarted = await AudioWaveformsInterface.instance
           .startPlayer(_playerKey.toString(), finishMode);
       if (isStarted) {
-        setPlayerState(PlayerState.playing);
+        _setPlayerState(PlayerState.playing);
       } else {
         throw "Failed to start player";
       }
     }
+    _setRefresh(forceRefresh);
     notifyListeners();
   }
 
-  /// A function to pause currently playing audio.
+  /// Pauses currently playing audio.
   Future<void> pausePlayer() async {
-    final isPaused = await AudioWaveformsInterface.instance
-        .pausePlayer(_playerKey.toString());
+    final isPaused =
+        await AudioWaveformsInterface.instance.pausePlayer(playerKey);
     if (isPaused) {
-      setPlayerState(PlayerState.paused);
+      _setPlayerState(PlayerState.paused);
     }
     notifyListeners();
   }
@@ -143,7 +196,7 @@ class PlayerController extends ChangeNotifier {
     final isStopped = await AudioWaveformsInterface.instance
         .stopPlayer(_playerKey.toString());
     if (isStopped) {
-      setPlayerState(PlayerState.stopped);
+      _setPlayerState(PlayerState.stopped);
     }
     notifyListeners();
   }
@@ -169,7 +222,7 @@ class PlayerController extends ChangeNotifier {
   /// Default to Duration.max.
   Future<int> getDuration([DurationType? durationType]) async {
     final duration = await AudioWaveformsInterface.instance
-        .getDuration(_playerKey.toString(), durationType?.index ?? 1);
+        .getDuration(playerKey, durationType?.index ?? 1);
     return duration ?? -1;
   }
@@ -180,8 +233,7 @@ class PlayerController extends ChangeNotifier {
   Future<void> seekTo(int progress) async {
     if (progress < 0) return;
     if (_playerState == PlayerState.playing) {
-      await AudioWaveformsInterface.instance
-          .seekTo(_playerKey.toString(), progress);
+      await AudioWaveformsInterface.instance.seekTo(playerKey, progress);
     }
   }
@@ -215,6 +267,17 @@ class PlayerController extends ChangeNotifier {
     super.dispose();
   }
 
+  /// Sets the [_shouldRefresh] flag with the provided boolean parameter.
+  void _setRefresh(bool refresh) {
+    _shouldRefresh = refresh;
+  }
+
+  /// Sets the [_shouldRefresh] flag and notifies listeners.
+  void setRefresh(bool refresh) {
+    _shouldRefresh = refresh;
+    notifyListeners();
+  }
+
   @override
   bool operator ==(Object other) {
     return other is PlayerController && other.playerKey == playerKey;
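The docs above suggest extracting once and reusing the data. A sketch of that pattern, caching the extracted samples as JSON next to the audio file (the cache location and file naming are illustrative):

```dart
import 'dart:convert';
import 'dart:io';
import 'package:audio_waveforms/audio_waveforms.dart';

/// Returns cached waveform data if present; otherwise extracts it once
/// with [controller] and stores it for subsequent runs.
Future<List<double>> loadOrExtractWaveform(
  PlayerController controller,
  String audioPath, {
  int noOfSamples = 100,
}) async {
  final cacheFile = File('$audioPath.waveform.json'); // illustrative path
  if (await cacheFile.exists()) {
    return List<double>.from(jsonDecode(await cacheFile.readAsString()));
  }
  final data = await controller.extractWaveformData(
    path: audioPath,
    noOfSamples: noOfSamples,
  );
  await cacheFile.writeAsString(jsonEncode(data));
  return data;
}
```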