Эх сурвалжийг харах

bring back audio message cells

cyberta 4 жил өмнө
parent
commit
5015ea6c7c

+ 12 - 0
deltachat-ios.xcodeproj/project.pbxproj

@@ -7,6 +7,9 @@
 	objects = {
 
 /* Begin PBXBuildFile section */
+		3008CB7224F93EB900E6A617 /* NewAudioMessageCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3008CB7124F93EB900E6A617 /* NewAudioMessageCell.swift */; };
+		3008CB7424F9436C00E6A617 /* NewAudioPlayerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3008CB7324F9436C00E6A617 /* NewAudioPlayerView.swift */; };
+		3008CB7624F95B6D00E6A617 /* NewAudioController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3008CB7524F95B6D00E6A617 /* NewAudioController.swift */; };
 		300C509D234B551900F8AE22 /* TextMediaMessageCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 300C509C234B551900F8AE22 /* TextMediaMessageCell.swift */; };
 		300C50A1234BDAB800F8AE22 /* TextMediaMessageSizeCalculator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 300C50A0234BDAB800F8AE22 /* TextMediaMessageSizeCalculator.swift */; };
 		30149D9322F21129003C12B5 /* QrViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 30149D9222F21129003C12B5 /* QrViewController.swift */; };
@@ -246,6 +249,9 @@
 /* Begin PBXFileReference section */
 		21EE28844E7A690D73BF5285 /* Pods-deltachat-iosTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-deltachat-iosTests.debug.xcconfig"; path = "Pods/Target Support Files/Pods-deltachat-iosTests/Pods-deltachat-iosTests.debug.xcconfig"; sourceTree = "<group>"; };
 		2F7009234DB9408201A6CDCB /* Pods_deltachat_iosTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_deltachat_iosTests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+		3008CB7124F93EB900E6A617 /* NewAudioMessageCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NewAudioMessageCell.swift; sourceTree = "<group>"; };
+		3008CB7324F9436C00E6A617 /* NewAudioPlayerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NewAudioPlayerView.swift; sourceTree = "<group>"; };
+		3008CB7524F95B6D00E6A617 /* NewAudioController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NewAudioController.swift; sourceTree = "<group>"; };
 		300C509C234B551900F8AE22 /* TextMediaMessageCell.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TextMediaMessageCell.swift; sourceTree = "<group>"; };
 		300C50A0234BDAB800F8AE22 /* TextMediaMessageSizeCalculator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TextMediaMessageSizeCalculator.swift; sourceTree = "<group>"; };
 		30149D9222F21129003C12B5 /* QrViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = QrViewController.swift; sourceTree = "<group>"; };
@@ -786,6 +792,7 @@
 		30FDB6B224D18E390066C48D /* Chat */ = {
 			isa = PBXGroup;
 			children = (
+				3008CB7524F95B6D00E6A617 /* NewAudioController.swift */,
 				30FDB6F824D1C1000066C48D /* ChatViewControllerNew.swift */,
 				30FDB6B524D193DD0066C48D /* Views */,
 			);
@@ -798,6 +805,7 @@
 				30FDB6B624D193DD0066C48D /* Cells */,
 				30E348DE24F3F819005C93D1 /* ChatTableView.swift */,
 				30F8817524DA97DA0023780E /* BackgroundContainer.swift */,
+				3008CB7324F9436C00E6A617 /* NewAudioPlayerView.swift */,
 			);
 			path = Views;
 			sourceTree = "<group>";
@@ -810,6 +818,7 @@
 				30E348E024F53772005C93D1 /* NewImageTextCell.swift */,
 				30E348E424F6647D005C93D1 /* NewFileTextCell.swift */,
 				30A4149624F6EFBE00EC91EB /* NewInfoMessageCell.swift */,
+				3008CB7124F93EB900E6A617 /* NewAudioMessageCell.swift */,
 			);
 			path = Cells;
 			sourceTree = "<group>";
@@ -1426,6 +1435,7 @@
 				305961F02346125100C80F33 /* NSConstraintLayoutSet.swift in Sources */,
 				3059620E234614E700C80F33 /* DcContact+Extension.swift in Sources */,
 				AED423D7249F580700B6B2BB /* BlockedContactsViewController.swift in Sources */,
+				3008CB7424F9436C00E6A617 /* NewAudioPlayerView.swift in Sources */,
 				AED62BCE247687E6009E220D /* LocationStreamingIndicator.swift in Sources */,
 				305961F72346125100C80F33 /* MessageCollectionViewCell.swift in Sources */,
 				AE851AC9227C77CF00ED86F0 /* Media.swift in Sources */,
@@ -1461,6 +1471,7 @@
 				305961E42346125100C80F33 /* MessageKitDateFormatter.swift in Sources */,
 				AEC67A1C241CE9E4007DDBE1 /* AppStateRestorer.swift in Sources */,
 				305961D32346125100C80F33 /* MessagesViewController+Keyboard.swift in Sources */,
+				3008CB7224F93EB900E6A617 /* NewAudioMessageCell.swift in Sources */,
 				305961EF2346125100C80F33 /* HorizontalEdgeInsets.swift in Sources */,
 				305961D62346125100C80F33 /* MessageInputBar.swift in Sources */,
 				305961ED2346125100C80F33 /* DetectorType.swift in Sources */,
@@ -1505,6 +1516,7 @@
 				AEE6EC482283045D00EDC689 /* EditSettingsController.swift in Sources */,
 				30E348DF24F3F819005C93D1 /* ChatTableView.swift in Sources */,
 				30E348E124F53772005C93D1 /* NewImageTextCell.swift in Sources */,
+				3008CB7624F95B6D00E6A617 /* NewAudioController.swift in Sources */,
 				305961DF2346125100C80F33 /* MessageCellDelegate.swift in Sources */,
 				302B84CE2397F6CD001C261F /* URL+Extension.swift in Sources */,
 				7A9FB1441FB061E2001FEA36 /* AppDelegate.swift in Sources */,

+ 8 - 4
deltachat-ios/Chat/ChatViewControllerNew.swift

@@ -75,7 +75,7 @@ class ChatViewControllerNew: UITableViewController {
     }()
 
     /// The audio controller manages the AVAudioPlayer state (play, pause, stop) and updates the audio cell UI accordingly.
-    //open lazy var audioController = BasicAudioController(messageCollectionView: messagesCollectionView)
+    private lazy var audioController = NewAudioController(dcContext: dcContext, chatId: chatId)
 
     private var disableWriting: Bool
     private var showNamesAboveMessage: Bool
@@ -118,16 +118,15 @@ class ChatViewControllerNew: UITableViewController {
         tableView.register(NewImageTextCell.self, forCellReuseIdentifier: "image")
         tableView.register(NewFileTextCell.self, forCellReuseIdentifier: "file")
         tableView.register(NewInfoMessageCell.self, forCellReuseIdentifier: "info")
+        tableView.register(NewAudioMessageCell.self, forCellReuseIdentifier: "audio")
         tableView.rowHeight = UITableView.automaticDimension
         tableView.separatorStyle = .none
         tableView.allowsSelection = false
-        //messagesCollectionView.register(InfoMessageCell.self)
         super.viewDidLoad()
         if !dcContext.isConfigured() {
             // TODO: display message about nothing being configured
             return
         }
-        //configureMessageCollectionView()
         configureEmptyStateView()
 
         if !disableWriting {
@@ -267,7 +266,7 @@ class ChatViewControllerNew: UITableViewController {
         if let ephemeralTimerModifiedObserver = self.ephemeralTimerModifiedObserver {
             nc.removeObserver(ephemeralTimerModifiedObserver)
         }
-        //audioController.stopAnyOngoingPlaying()
+        audioController.stopAnyOngoingPlaying()
         stopTimer()
     }
 
@@ -318,6 +317,11 @@ class ChatViewControllerNew: UITableViewController {
             cell = tableView.dequeueReusableCell(withIdentifier: "image", for: indexPath) as? NewImageTextCell ?? NewImageTextCell()
         } else if message.type == DC_MSG_FILE {
             cell = tableView.dequeueReusableCell(withIdentifier: "file", for: indexPath) as? NewFileTextCell ?? NewFileTextCell()
+        } else if message.type == DC_MSG_AUDIO ||  message.type == DC_MSG_VOICE {
+            let audioMessageCell: NewAudioMessageCell = tableView.dequeueReusableCell(withIdentifier: "audio",
+                                                                                      for: indexPath) as? NewAudioMessageCell ?? NewAudioMessageCell()
+            audioController.update(audioMessageCell, with: message.id)
+            cell = audioMessageCell
         } else {
             cell = tableView.dequeueReusableCell(withIdentifier: "text", for: indexPath) as? NewTextMessageCell ?? NewTextMessageCell()
         }

+ 211 - 0
deltachat-ios/Chat/NewAudioController.swift

@@ -0,0 +1,211 @@
+import UIKit
+import AVFoundation
+import DcCore
+
/// Playback state of the audio controller.
public enum PlayerState {

    /// A sound is currently being played.
    case playing

    /// Playback is paused and can be resumed.
    case pause

    /// Nothing is playing and the audio player is nil.
    case stopped
}
+
/// The `NewAudioController` updates the UI of the audio cell that is currently
/// playing a sound, and creates and manages the `AVAudioPlayer` states:
/// play, pause and stop.
open class NewAudioController: NSObject, AVAudioPlayerDelegate, NewAudioMessageCellDelegate {

    /// Shared audio session, configured once for speaker playback.
    lazy var audioSession: AVAudioSession = {
        let audioSession = AVAudioSession.sharedInstance()
        _ = try? audioSession.setCategory(AVAudioSession.Category.playback, options: [.defaultToSpeaker])
        return audioSession
    }()

    /// The `AVAudioPlayer` that is playing the sound; nil while stopped.
    open var audioPlayer: AVAudioPlayer?

    /// The `NewAudioMessageCell` that is currently playing sound.
    /// Weak: table view cells are reused and must not be kept alive here.
    open weak var playingCell: NewAudioMessageCell?

    /// The message whose audio file is currently loaded into the player.
    open var playingMessage: DcMsg?

    /// Current audio controller state: playing, paused or stopped.
    open private(set) var state: PlayerState = .stopped

    private let dcContext: DcContext
    private let chatId: Int
    private let chat: DcChat

    /// Repeating timer that drives the playing-progress UI updates.
    internal var progressTimer: Timer?

    // MARK: - Init Methods

    public init(dcContext: DcContext, chatId: Int) {
        self.dcContext = dcContext
        self.chatId = chatId
        self.chat = dcContext.getChat(chatId: chatId)
        super.init()
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(audioRouteChanged),
                                               name: AVAudioSession.routeChangeNotification,
                                               object: AVAudioSession.sharedInstance())
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }

    // MARK: - Methods

    /// Configures `cell` for the message with `messageId`.
    ///
    /// - Parameters:
    ///   - cell: The `NewAudioMessageCell` that needs to be configured.
    ///   - messageId: The id of the message shown by the cell.
    ///
    /// - Note:
    ///   Called every time an audio cell is (re)configured. If this cell's
    ///   message is the one currently loaded into the player, its progress,
    ///   play/pause button and elapsed time are restored.
    func update(_ cell: NewAudioMessageCell, with messageId: Int) {
        cell.delegate = self
        if playingMessage?.id == messageId, let player = audioPlayer {
            playingCell = cell
            cell.audioPlayerView.setProgress((player.duration == 0) ? 0 : Float(player.currentTime / player.duration))
            cell.audioPlayerView.showPlayLayout(player.isPlaying)
            cell.audioPlayerView.setDuration(duration: player.currentTime)
        }
    }

    /// Handles a tap on a cell's play button: starts, toggles or switches playback.
    public func playButtonTapped(cell: NewAudioMessageCell, messageId: Int) {
        let message = DcMsg(id: messageId)
        guard state != .stopped else {
            // No audio sound is playing - start playback for the tapped message.
            playSound(for: message, in: cell)
            return
        }
        if playingMessage?.id == message.id {
            // Tap occurred in the cell that is currently playing: toggle play/pause.
            if state == .playing {
                pauseSound(in: cell)
            } else {
                resumeSound()
            }
        } else {
            // Tap occurred in a different cell than the one currently playing.
            // First stop the current playback, then start the tapped message.
            stopAnyOngoingPlaying()
            playSound(for: message, in: cell)
        }
    }

    /// Starts playing an audio sound.
    ///
    /// - Parameters:
    ///   - message: The `DcMsg` that contains the audio item to be played.
    ///   - audioCell: The `NewAudioMessageCell` that is updated while audio is playing.
    open func playSound(for message: DcMsg, in audioCell: NewAudioMessageCell) {
        guard message.type == DC_MSG_AUDIO || message.type == DC_MSG_VOICE else {
            // Previously this log line ran unconditionally on the success path;
            // it only makes sense when the message is not an audio message.
            print("NewAudioController failed to play sound because the given message kind is not Audio")
            return
        }
        guard let fileUrl = message.fileURL, let player = try? AVAudioPlayer(contentsOf: fileUrl) else {
            print("NewAudioController failed to play sound because the message has no playable file")
            return
        }
        // Only mutate controller state after the player was created successfully,
        // so a failed load does not leave stale playingCell/playingMessage behind.
        _ = try? audioSession.setActive(true)
        playingCell = audioCell
        playingMessage = message
        audioPlayer = player
        player.prepareToPlay()
        player.delegate = self
        player.play()
        state = .playing
        audioCell.audioPlayerView.showPlayLayout(true)  // show pause button on audio cell
        startProgressTimer()
    }

    /// Pauses the currently playing audio sound.
    ///
    /// - Parameters:
    ///   - audioCell: The `NewAudioMessageCell` that needs to be updated by the pause action.
    open func pauseSound(in audioCell: NewAudioMessageCell) {
        audioPlayer?.pause()
        state = .pause
        audioCell.audioPlayerView.showPlayLayout(false) // show play button on audio cell
        progressTimer?.invalidate()
    }

    /// Stops any ongoing audio playing, if it exists, and resets all playback state.
    open func stopAnyOngoingPlaying() {
        // If the audio player is nil there is nothing to stop.
        guard let player = audioPlayer else { return }
        player.stop()
        state = .stopped
        if let cell = playingCell {
            cell.audioPlayerView.setProgress(0.0)
            cell.audioPlayerView.showPlayLayout(false)
            cell.audioPlayerView.setDuration(duration: player.duration)
        }
        progressTimer?.invalidate()
        progressTimer = nil
        audioPlayer = nil
        playingMessage = nil
        playingCell = nil
        try? audioSession.setActive(false)
    }

    /// Resumes a currently paused audio sound; stops everything if the
    /// player or cell went away in the meantime.
    open func resumeSound() {
        guard let player = audioPlayer, let cell = playingCell else {
            stopAnyOngoingPlaying()
            return
        }
        player.prepareToPlay()
        player.play()
        state = .playing
        startProgressTimer()
        cell.audioPlayerView.showPlayLayout(true) // show pause button on audio cell
    }

    // MARK: - Fire Methods

    /// Pushes current playback progress and elapsed time into the playing cell.
    @objc private func didFireProgressTimer(_ timer: Timer) {
        guard let player = audioPlayer, let cell = playingCell else {
            return
        }
        cell.audioPlayerView.setProgress((player.duration == 0) ? 0 : Float(player.currentTime / player.duration))
        cell.audioPlayerView.setDuration(duration: player.currentTime)
    }

    // MARK: - Private Methods

    /// (Re)starts the repeating progress timer.
    /// NOTE(review): a scheduled Timer retains its target, so the timer must be
    /// invalidated (stopAnyOngoingPlaying) before this controller can deinit.
    private func startProgressTimer() {
        progressTimer?.invalidate()
        progressTimer = Timer.scheduledTimer(timeInterval: 0.1,
                                             target: self,
                                             selector: #selector(NewAudioController.didFireProgressTimer(_:)),
                                             userInfo: nil,
                                             repeats: true)
    }

    // MARK: - AVAudioPlayerDelegate

    open func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        stopAnyOngoingPlaying()
    }

    open func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
        stopAnyOngoingPlaying()
    }

    // MARK: - AVAudioSession.routeChangeNotification handler

    @objc func audioRouteChanged(note: Notification) {
        guard let reason = note.userInfo?[AVAudioSessionRouteChangeReasonKey] as? Int,
              reason == AVAudioSession.RouteChangeReason.oldDeviceUnavailable.rawValue else {
            return
        }
        // Headphones were unplugged.
        // NOTE(review): this resumes playback on route loss; pausing is the more
        // common UX in that situation - confirm resuming is intentional.
        resumeSound()
    }
}

+ 65 - 0
deltachat-ios/Chat/Views/Cells/NewAudioMessageCell.swift

@@ -0,0 +1,65 @@
+import UIKit
+import DcCore
+
/// Delegate for taps on the play button of an audio message cell.
public protocol NewAudioMessageCellDelegate: AnyObject {
    func playButtonTapped(cell: NewAudioMessageCell, messageId: Int)
}

/// Message cell that shows an audio player (play button, progress bar, duration)
/// with an optional message text below it.
public class NewAudioMessageCell: BaseMessageCell {

    public weak var delegate: NewAudioMessageCellDelegate?

    /// The player UI; play-button taps are forwarded to `delegate`.
    lazy var audioPlayerView: NewAudioPlayerView = {
        let view = NewAudioPlayerView()
        view.translatesAutoresizingMaskIntoConstraints = false
        return view
    }()

    /// Shows the optional text of the audio message.
    lazy var messageLabel: UILabel = {
        let label = UILabel()
        label.translatesAutoresizingMaskIntoConstraints = false
        label.numberOfLines = 0
        label.lineBreakMode = .byWordWrapping
        return label
    }()

    // Id of the message currently displayed; handed back to the delegate on tap.
    private var messageId: Int = 0

    override func setupSubviews() {
        super.setupSubviews()
        // Removed an unused `spacerView` that was created here but never added
        // to the view hierarchy.
        mainContentView.addArrangedSubview(audioPlayerView)
        mainContentView.addArrangedSubview(messageLabel)
        audioPlayerView.constraintWidthTo(250).isActive = true
        let gestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(onPlayButtonTapped))
        gestureRecognizer.numberOfTapsRequired = 1
        audioPlayerView.playButton.addGestureRecognizer(gestureRecognizer)
    }

    /// Forwards the play-button tap to the delegate with the current message id.
    @objc public func onPlayButtonTapped() {
        delegate?.playButtonTapped(cell: self, messageId: messageId)
    }

    override func update(msg: DcMsg, messageStyle: UIRectCorner, isAvatarVisible: Bool) {
        messageId = msg.id
        if let text = msg.text {
            // Collapse the spacing when there is no text below the player.
            mainContentView.spacing = text.isEmpty ? 0 : 8
            messageLabel.text = text
        } else {
            mainContentView.spacing = 0
        }

        super.update(msg: msg, messageStyle: messageStyle, isAvatarVisible: isAvatarVisible)
    }

    public override func prepareForReuse() {
        super.prepareForReuse()
        mainContentView.spacing = 0
        messageLabel.text = nil
        messageLabel.attributedText = nil
        messageId = 0
        delegate = nil
        audioPlayerView.reset()
    }
}

+ 110 - 0
deltachat-ios/Chat/Views/NewAudioPlayerView.swift

@@ -0,0 +1,110 @@
+import Foundation
+import UIKit
+
/// Player UI for audio messages: play/pause button, progress bar and duration label.
open class NewAudioPlayerView: UIView {

    /// The play button view to display on audio messages.
    /// `isSelected == true` shows the pause image, `false` the play image.
    lazy var playButton: UIButton = {
        let playButton = UIButton(type: .custom)
        let playImage = UIImage(named: "play")
        let pauseImage = UIImage(named: "pause")
        playButton.setImage(playImage?.withRenderingMode(.alwaysTemplate), for: .normal)
        playButton.setImage(pauseImage?.withRenderingMode(.alwaysTemplate), for: .selected)
        playButton.imageView?.contentMode = .scaleAspectFit
        playButton.contentVerticalAlignment = .fill
        playButton.contentHorizontalAlignment = .fill
        playButton.translatesAutoresizingMaskIntoConstraints = false
        playButton.isUserInteractionEnabled = true
        return playButton
    }()

    /// The time duration label to display on audio messages.
    private lazy var durationLabel: UILabel = {
        let durationLabel = UILabel(frame: CGRect.zero)
        durationLabel.textAlignment = .right
        durationLabel.font = UIFont.preferredFont(forTextStyle: .body)
        durationLabel.adjustsFontForContentSizeCategory = true
        durationLabel.text = "0:00"
        durationLabel.translatesAutoresizingMaskIntoConstraints = false
        return durationLabel
    }()

    /// Progress bar between the play button and the duration label.
    private lazy var progressView: UIProgressView = {
        let progressView = UIProgressView(progressViewStyle: .default)
        progressView.progress = 0.0
        progressView.translatesAutoresizingMaskIntoConstraints = false
        return progressView
    }()

    public override init(frame: CGRect) {
        super.init(frame: frame)
        // Keep both init paths consistent: this view is always laid out with
        // Auto Layout (previously only init?(coder:) disabled the autoresizing mask).
        self.translatesAutoresizingMaskIntoConstraints = false
        setupSubviews()
    }

    public required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        self.translatesAutoresizingMaskIntoConstraints = false
        setupSubviews()
    }

    /// Responsible for setting up the constraints of the view's subviews.
    open func setupConstraints() {
        playButton.constraintHeightTo(45, priority: UILayoutPriority(rawValue: 999)).isActive = true
        playButton.constraintWidthTo(45, priority: UILayoutPriority(rawValue: 999)).isActive = true

        let playButtonConstraints = [playButton.constraintCenterYTo(self),
                                     playButton.constraintAlignLeadingTo(self)]
        let durationLabelConstraints = [durationLabel.constraintAlignTrailingTo(self, paddingTrailing: 12),
                                        durationLabel.constraintCenterYTo(self)]
        self.addConstraints(playButtonConstraints)
        self.addConstraints(durationLabelConstraints)

        progressView.addConstraints(left: playButton.rightAnchor,
                                    right: durationLabel.leftAnchor,
                                    centerY: self.centerYAnchor,
                                    leftConstant: 8,
                                    rightConstant: 8)
        // The view is exactly as tall as the play button.
        let height = self.heightAnchor.constraint(equalTo: playButton.heightAnchor)
        height.priority = .required
        height.isActive = true
    }

    open func setupSubviews() {
        self.addSubview(playButton)
        self.addSubview(durationLabel)
        self.addSubview(progressView)
        setupConstraints()
    }

    /// Restores the initial state (no progress, play icon, zero duration).
    open func reset() {
        progressView.progress = 0
        playButton.isSelected = false
        durationLabel.text = "0:00"
    }

    /// Sets playback progress in the range 0...1.
    open func setProgress(_ progress: Float) {
        progressView.progress = progress
    }

    /// Sets the duration label from a duration in seconds, formatted as
    /// "0:ss" below one minute, "m:ss" below one hour, and "h:mm:ss" above.
    open func setDuration(duration: Double) {
        // Round up once and derive hours/minutes/seconds from a single value so
        // the components stay consistent. The previous per-branch math rounded
        // only in the sub-minute branch, so e.g. 59.5s was shown as "0:60".
        let totalSeconds = max(0, Int(duration.rounded(.up)))
        let hours = totalSeconds / 3600
        let minutes = (totalSeconds % 3600) / 60
        let seconds = totalSeconds % 60
        if hours > 0 {
            durationLabel.text = String(format: "%d:%02d:%02d", hours, minutes, seconds)
        } else {
            durationLabel.text = String(format: "%d:%02d", minutes, seconds)
        }
    }

    /// Toggles the play/pause icon: `true` while playing (shows pause).
    open func showPlayLayout(_ play: Bool) {
        playButton.isSelected = play
    }
}

+ 0 - 13
deltachat-ios/MessageKit/Controllers/BasicAudioController.swift

@@ -25,19 +25,6 @@
 import UIKit
 import AVFoundation
 
-/// The `PlayerState` indicates the current audio controller state
-public enum PlayerState {
-
-    /// The audio controller is currently playing a sound
-    case playing
-
-    /// The audio controller is currently in pause state
-    case pause
-
-    /// The audio controller is not playing any sound and audioPlayer is nil
-    case stopped
-}
-
 /// The `BasicAudioController` update UI for current audio cell that is playing a sound
 /// and also creates and manage an `AVAudioPlayer` states, play, pause and stop.
 open class BasicAudioController: NSObject, AVAudioPlayerDelegate {