
Merge pull request #337 from deltachat/custom_audio_cells

display audio chat messages
björn petersen · 5 years ago · commit b8481bd7fa

34 changed files with 685 additions and 96 deletions
  1. BIN      Assets.xcassets/disclouser.png
  2. BIN      Assets.xcassets/disclouser@2x.png
  3. BIN      Assets.xcassets/disclouser@3x.png
  4. BIN      Assets.xcassets/pause.png
  5. BIN      Assets.xcassets/pause@2x.png
  6. BIN      Assets.xcassets/pause@3x.png
  7. BIN      Assets.xcassets/play.png
  8. BIN      Assets.xcassets/play@2x.png
  9. BIN      Assets.xcassets/play@3x.png
 10. +12 -0   deltachat-ios.xcodeproj/project.pbxproj
 11. +23 -0   deltachat-ios/Assets.xcassets/disclouser.imageset/Contents.json
 12. BIN      deltachat-ios/Assets.xcassets/disclouser.imageset/disclouser.png
 13. BIN      deltachat-ios/Assets.xcassets/disclouser.imageset/disclouser@2x.png
 14. BIN      deltachat-ios/Assets.xcassets/disclouser.imageset/disclouser@3x.png
 15. +6 -0    deltachat-ios/Assets.xcassets/ic_attach_file_black_36dp/Contents.json
 16. +23 -0   deltachat-ios/Assets.xcassets/pause.imageset/Contents.json
 17. BIN      deltachat-ios/Assets.xcassets/pause.imageset/pause.png
 18. BIN      deltachat-ios/Assets.xcassets/pause.imageset/pause@2x.png
 19. BIN      deltachat-ios/Assets.xcassets/pause.imageset/pause@3x.png
 20. +23 -0   deltachat-ios/Assets.xcassets/play.imageset/Contents.json
 21. BIN      deltachat-ios/Assets.xcassets/play.imageset/play.png
 22. BIN      deltachat-ios/Assets.xcassets/play.imageset/play@2x.png
 23. BIN      deltachat-ios/Assets.xcassets/play.imageset/play@3x.png
 24. +44 -0   deltachat-ios/Controller/ChatViewController.swift
 25. +47 -20  deltachat-ios/DC/Wrapper.swift
 26. +0 -7    deltachat-ios/Extensions/UIImage+Extension.swift
 27. +5 -0    deltachat-ios/Helper/Utils.swift
 28. +224 -0  deltachat-ios/MessageKit/Controllers/BasicAudioController.swift
 29. +50 -4   deltachat-ios/MessageKit/Layout/AudioMessageSizeCalculator.swift
 30. +2 -0    deltachat-ios/MessageKit/Protocols/AudioItem.swift
 31. +118 -0  deltachat-ios/MessageKit/Views/AudioPlayerView.swift
 32. +88 -64  deltachat-ios/MessageKit/Views/Cells/AudioMessageCell.swift
 33. +1 -1    deltachat-ios/MessageKit/Views/Cells/ContactMessageCell.swift
 34. +19 -0   deltachat-ios/Model/Audio.swift
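Taken together, the changes wire audio messages through the app's bundled MessageKit fork: Wrapper.swift maps voice/audio messages to MessageKind.audio, the new Audio model carries the file URL, duration and an optional caption, AudioMessageCell embeds the new AudioPlayerView, and BasicAudioController drives the AVAudioPlayer. A rough sketch of the flow (simplified illustration only; fileURL, duration, message and cell are placeholders, the exact signatures are in the diffs below):

    // 1. DcMsg turns an audio/voice attachment into a MessageKind.audio payload (Wrapper.swift).
    let kind = MessageKind.audio(Audio(url: fileURL, duration: duration))

    // 2. MessageKit sizes and renders it via AudioMessageSizeCalculator and AudioMessageCell,
    //    whose AudioPlayerView shows the play button, progress bar and duration label.

    // 3. A tap on the play button reaches ChatViewController.didTapPlayButton(in:), which
    //    delegates the actual playback to BasicAudioController:
    audioController.playSound(for: message, in: cell)   // start playing and update the cell UI
    audioController.pauseSound(for: message, in: cell)  // pause
    audioController.stopAnyOngoingPlaying()             // stop and reset the cell UI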

BIN
Assets.xcassets/disclouser.png


BIN
Assets.xcassets/disclouser@2x.png


BIN
Assets.xcassets/disclouser@3x.png


BIN
Assets.xcassets/pause.png


BIN
Assets.xcassets/pause@2x.png


BIN
Assets.xcassets/pause@3x.png


BIN
Assets.xcassets/play.png


BIN
Assets.xcassets/play@2x.png


BIN
Assets.xcassets/play@3x.png


+ 12 - 0
deltachat-ios.xcodeproj/project.pbxproj

@@ -11,6 +11,9 @@
 		300C50A1234BDAB800F8AE22 /* TextMediaMessageSizeCalculator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 300C50A0234BDAB800F8AE22 /* TextMediaMessageSizeCalculator.swift */; };
 		30149D9322F21129003C12B5 /* QrViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 30149D9222F21129003C12B5 /* QrViewController.swift */; };
 		3022E6BE22E8768800763272 /* InfoPlist.strings in Resources */ = {isa = PBXBuildFile; fileRef = 3022E6C022E8768800763272 /* InfoPlist.strings */; };
+		3040F45E234DFBC000FA34D5 /* Audio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3040F45D234DFBC000FA34D5 /* Audio.swift */; };
+		3040F460234F419400FA34D5 /* BasicAudioController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3040F45F234F419300FA34D5 /* BasicAudioController.swift */; };
+		3040F462234F550300FA34D5 /* AudioPlayerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3040F461234F550300FA34D5 /* AudioPlayerView.swift */; };
 		305961CC2346125100C80F33 /* UIView+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 305961822346125000C80F33 /* UIView+Extensions.swift */; };
 		305961CD2346125100C80F33 /* UIEdgeInsets+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 305961832346125000C80F33 /* UIEdgeInsets+Extensions.swift */; };
 		305961CF2346125100C80F33 /* UIColor+Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 305961852346125000C80F33 /* UIColor+Extensions.swift */; };
@@ -174,6 +177,9 @@
 		3022E6D122E8769E00763272 /* lt */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = lt; path = lt.lproj/InfoPlist.strings; sourceTree = "<group>"; };
 		3022E6D222E8769F00763272 /* zh-Hant-TW */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hant-TW"; path = "zh-Hant-TW.lproj/InfoPlist.strings"; sourceTree = "<group>"; };
 		3022E6D322E876A100763272 /* uk */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = uk; path = uk.lproj/InfoPlist.strings; sourceTree = "<group>"; };
+		3040F45D234DFBC000FA34D5 /* Audio.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Audio.swift; sourceTree = "<group>"; };
+		3040F45F234F419300FA34D5 /* BasicAudioController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BasicAudioController.swift; sourceTree = "<group>"; };
+		3040F461234F550300FA34D5 /* AudioPlayerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioPlayerView.swift; sourceTree = "<group>"; };
 		305961822346125000C80F33 /* UIView+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "UIView+Extensions.swift"; sourceTree = "<group>"; };
 		305961832346125000C80F33 /* UIEdgeInsets+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "UIEdgeInsets+Extensions.swift"; sourceTree = "<group>"; };
 		305961852346125000C80F33 /* UIColor+Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "UIColor+Extensions.swift"; sourceTree = "<group>"; };
@@ -410,6 +416,7 @@
 		305961892346125000C80F33 /* Controllers */ = {
 			isa = PBXGroup;
 			children = (
+				3040F45F234F419300FA34D5 /* BasicAudioController.swift */,
 				3059618A2346125000C80F33 /* MessagesViewController+Keyboard.swift */,
 				3059618B2346125000C80F33 /* MessagesViewController.swift */,
 				3059618C2346125000C80F33 /* MessagesViewController+Menu.swift */,
@@ -479,6 +486,7 @@
 				305961BE2346125100C80F33 /* MessagesCollectionView.swift */,
 				305961BF2346125100C80F33 /* PlayButtonView.swift */,
 				305961C02346125100C80F33 /* BubbleCircle.swift */,
+				3040F461234F550300FA34D5 /* AudioPlayerView.swift */,
 			);
 			path = Views;
 			sourceTree = "<group>";
@@ -629,6 +637,7 @@
 				AEACE2DE1FB3246400DCDD78 /* Message.swift */,
 				AE851AC6227C776400ED86F0 /* Location.swift */,
 				AE851AC8227C77CF00ED86F0 /* Media.swift */,
+				3040F45D234DFBC000FA34D5 /* Audio.swift */,
 			);
 			path = Model;
 			sourceTree = "<group>";
@@ -1011,8 +1020,10 @@
 				305961EA2346125100C80F33 /* MessageStyle.swift in Sources */,
 				305961F92346125100C80F33 /* MessageLabel.swift in Sources */,
 				305961FA2346125100C80F33 /* MessageReusableView.swift in Sources */,
+				3040F462234F550300FA34D5 /* AudioPlayerView.swift in Sources */,
 				AE52EA19229EB53C00C586C9 /* ContactDetailHeader.swift in Sources */,
 				78E45E4421D3F14A00D4B15E /* UIImage+Extension.swift in Sources */,
+				3040F460234F419400FA34D5 /* BasicAudioController.swift in Sources */,
 				305962082346125100C80F33 /* MediaMessageSizeCalculator.swift in Sources */,
 				AE52EA20229EB9F000C586C9 /* EditGroupViewController.swift in Sources */,
 				70B08FCD21073B910097D3EA /* NewGroupMemberChoiceController.swift in Sources */,
@@ -1021,6 +1032,7 @@
 				78ED838D21D577D000243125 /* events.swift in Sources */,
 				305961FD2346125100C80F33 /* TypingBubble.swift in Sources */,
 				305961D72346125100C80F33 /* MessageKit+Availability.swift in Sources */,
+				3040F45E234DFBC000FA34D5 /* Audio.swift in Sources */,
 				305961FE2346125100C80F33 /* InsetLabel.swift in Sources */,
 				B21005DB23383664004C70C5 /* SettingsClassicViewController.swift in Sources */,
 				305961F62346125100C80F33 /* MessageContentCell.swift in Sources */,

+ 23 - 0
deltachat-ios/Assets.xcassets/disclouser.imageset/Contents.json

@@ -0,0 +1,23 @@
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "filename" : "disclouser.png",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "disclouser@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "disclouser@3x.png",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

BIN
deltachat-ios/Assets.xcassets/disclouser.imageset/disclouser.png


BIN
deltachat-ios/Assets.xcassets/disclouser.imageset/disclouser@2x.png


BIN
deltachat-ios/Assets.xcassets/disclouser.imageset/disclouser@3x.png


+ 6 - 0
deltachat-ios/Assets.xcassets/ic_attach_file_black_36dp/Contents.json

@@ -0,0 +1,6 @@
+{
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

+ 23 - 0
deltachat-ios/Assets.xcassets/pause.imageset/Contents.json

@@ -0,0 +1,23 @@
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "filename" : "pause.png",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "pause@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "pause@3x.png",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

BIN
deltachat-ios/Assets.xcassets/pause.imageset/pause.png


BIN
deltachat-ios/Assets.xcassets/pause.imageset/pause@2x.png


BIN
deltachat-ios/Assets.xcassets/pause.imageset/pause@3x.png


+ 23 - 0
deltachat-ios/Assets.xcassets/play.imageset/Contents.json

@@ -0,0 +1,23 @@
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "filename" : "play.png",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "play@2x.png",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "universal",
+      "filename" : "play@3x.png",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

BIN
deltachat-ios/Assets.xcassets/play.imageset/play.png


BIN
deltachat-ios/Assets.xcassets/play.imageset/play@2x.png


BIN
deltachat-ios/Assets.xcassets/play.imageset/play@3x.png


+ 44 - 0
deltachat-ios/Controller/ChatViewController.swift

@@ -2,6 +2,7 @@ import MapKit
 import QuickLook
 import UIKit
 import InputBarAccessoryView
+import AVFoundation
 
 protocol MediaSendHandler {
     func onSuccess()
@@ -31,6 +32,9 @@ class ChatViewController: MessagesViewController {
         UITapGestureRecognizer(target: self, action: #selector(chatProfilePressed))
     }()
 
+    /// The `BasicAudioController` controls the AVAudioPlayer state (play, pause, stop) and updates the audio cell UI accordingly.
+    open lazy var audioController = BasicAudioController(messageCollectionView: messagesCollectionView)
+
     var disableWriting = false
     var showCustomNavBar = true
     var previewView: UIView?
@@ -158,6 +162,7 @@ class ChatViewController: MessagesViewController {
         if let incomingMsgObserver = self.incomingMsgObserver {
             nc.removeObserver(incomingMsgObserver)
         }
+        audioController.stopAnyOngoingPlaying()
     }
 
     @objc
@@ -901,6 +906,45 @@ extension ChatViewController: MessageCellDelegate {
         print("Bottom label tapped")
         print("Bottom label tapped")
     }
     }
 
 
+    func didTapPlayButton(in cell: AudioMessageCell) {
+        guard let indexPath = messagesCollectionView.indexPath(for: cell),
+            let message = messagesCollectionView.messagesDataSource?.messageForItem(at: indexPath, in: messagesCollectionView) else {
+                print("Failed to identify message when audio cell receive tap gesture")
+                return
+        }
+        guard audioController.state != .stopped else {
+            // There is no audio sound playing - prepare to start playing for given audio message
+            audioController.playSound(for: message, in: cell)
+            return
+        }
+        if audioController.playingMessage?.messageId == message.messageId {
+            // the tap occurred in the cell that is currently playing audio
+            if audioController.state == .playing {
+                audioController.pauseSound(for: message, in: cell)
+            } else {
+                audioController.resumeSound()
+            }
+        } else {
+            // the tap occurred in a different cell than the one currently playing; stop the current playback first, then start the sound for the tapped message
+            audioController.stopAnyOngoingPlaying()
+            audioController.playSound(for: message, in: cell)
+        }
+    }
+
+
+    func didStartAudio(in cell: AudioMessageCell) {
+        print("audio started")
+    }
+
+    func didStopAudio(in cell: AudioMessageCell) {
+        print("audio stopped")
+    }
+
+    func didPauseAudio(in cell: AudioMessageCell) {
+        print("audio paused")
+    }
+
+
     @objc func didTapBackground(in cell: MessageCollectionViewCell) {
         print("background of message tapped")
     }

+ 47 - 20
deltachat-ios/DC/Wrapper.swift

@@ -1,5 +1,6 @@
 import Foundation
 import UIKit
+import AVFoundation
 
 class DcContext {
     let contextPointer: OpaquePointer?
@@ -473,36 +474,62 @@ class DcMsg: MessageType {
 
         switch self.viewtype! {
         case .image:
-            if text.isEmpty {
-                return MessageKind.photo(Media(image: image))
-            }
-            let attributedString = NSAttributedString(string: text, attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
-            return MessageKind.photoText(Media(image: image, text: attributedString))
+            return createImageMessage(text: text)
         case .video:
-            if text.isEmpty {
-                return MessageKind.video(Media(url: fileURL))
-            }
-            let attributedString = NSAttributedString(string: text, attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
-            return MessageKind.videoText(Media(url: fileURL, text: attributedString))
+            return createVideoMessage(text: text)
+        case .voice, .audio:
+            return createAudioMessage(text: text)
         default:
             // TODO: custom views for audio, etc
             if let filename = self.filename {
-                let fileSize = self.filesize / 1024
-                let fileString = "\(self.filename ?? "???") (\(self.filesize / 1024) kB)"
-                let attributedFileString = NSMutableAttributedString(string: fileString,
-                                                                     attributes: [NSAttributedString.Key.font: UIFont.italicSystemFont(ofSize: 13.0)])
-                if !text.isEmpty {
-                    attributedFileString.append(NSAttributedString(string: "\n\n",
-                                                                   attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 7.0)]))
-                    attributedFileString.append(NSAttributedString(string: text,
-                                                                   attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)]))
+                if Utils.hasAudioSuffix(url: fileURL!) {
+                   return createAudioMessage(text: text)
                 }
-                return MessageKind.fileText(Media(text: attributedFileString))
+                return createFileMessage(text: text)
             }
             return MessageKind.text(text)
         }
     }()
 
+    internal func createVideoMessage(text: String) -> MessageKind {
+        if text.isEmpty {
+            return MessageKind.video(Media(url: fileURL))
+        }
+        let attributedString = NSAttributedString(string: text, attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
+        return MessageKind.videoText(Media(url: fileURL, text: attributedString))
+    }
+
+    internal func createImageMessage(text: String) -> MessageKind {
+        if text.isEmpty {
+            return MessageKind.photo(Media(image: image))
+        }
+        let attributedString = NSAttributedString(string: text, attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
+        return MessageKind.photoText(Media(image: image, text: attributedString))
+    }
+
+    internal func createAudioMessage(text: String) -> MessageKind {
+        let audioAsset = AVURLAsset(url: fileURL!)
+        let seconds = Float(CMTimeGetSeconds(audioAsset.duration))
+        if !text.isEmpty {
+            let attributedString = NSAttributedString(string: text, attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
+            return MessageKind.audio(Audio(url: audioAsset.url, duration: seconds, text: attributedString))
+        }
+        return MessageKind.audio(Audio(url: fileURL!, duration: seconds))
+    }
+
+    internal func createFileMessage(text: String) -> MessageKind {
+        let fileString = "\(self.filename ?? "???") (\(self.filesize / 1024) kB)"
+        let attributedFileString = NSMutableAttributedString(string: fileString,
+                                                             attributes: [NSAttributedString.Key.font: UIFont.italicSystemFont(ofSize: 13.0)])
+        if !text.isEmpty {
+            attributedFileString.append(NSAttributedString(string: "\n\n",
+                                                           attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 7.0)]))
+            attributedFileString.append(NSAttributedString(string: text,
+                                                           attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)]))
+        }
+        return MessageKind.fileText(Media(text: attributedFileString))
+    }
+
     var messageId: String {
         return "\(id)"
     }
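Note that createAudioMessage above force-unwraps fileURL and reads the AVURLAsset duration synchronously; a message whose file is missing would crash there. A more defensive variant could look like the following sketch (an assumption, not part of this commit):

    internal func createAudioMessage(text: String) -> MessageKind {
        // Fall back to a plain text message if the audio file URL is missing.
        guard let url = fileURL else {
            return MessageKind.text(text)
        }
        let duration = Float(CMTimeGetSeconds(AVURLAsset(url: url).duration))
        if !text.isEmpty {
            let attributedString = NSAttributedString(string: text,
                                                      attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
            return MessageKind.audio(Audio(url: url, duration: duration, text: attributedString))
        }
        return MessageKind.audio(Audio(url: url, duration: duration))
    }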

+ 0 - 7
deltachat-ios/Extensions/UIImage+Extension.swift

@@ -95,13 +95,6 @@ extension UIImage {
         return UIImage(data: imageData!)
     }
 
-    public class func messageKitImageWith(type: ImageType) -> UIImage? {
-        let assetBundle = Bundle.messageKitAssetBundle()
-        let imagePath = assetBundle.path(forResource: type.rawValue, ofType: "png", inDirectory: "Images")
-        let image = UIImage(contentsOfFile: imagePath ?? "")
-        return image
-    }
-
 }
 
 public enum ImageType: String {

+ 5 - 0
deltachat-ios/Helper/Utils.swift

@@ -141,6 +141,11 @@ struct Utils {
         }
     }
 
+    static func hasAudioSuffix(url: URL) -> Bool {
+        ///TODO: add more file suffixes
+        return url.absoluteString.hasSuffix("wav")
+    }
+
     static func generateThumbnailFromVideo(url: URL) -> UIImage? {
         do {
             let asset = AVURLAsset(url: url)
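The new Utils.hasAudioSuffix only matches "wav" for now, as its TODO notes. One way to cover more formats would be to compare the path extension against a small whitelist, for example (a sketch, not part of this commit; the extension list is an assumption):

    static func hasAudioSuffix(url: URL) -> Bool {
        // Match on the lower-cased path extension instead of the raw string suffix.
        let audioExtensions = ["wav", "mp3", "m4a", "aac", "ogg", "opus"]
        return audioExtensions.contains(url.pathExtension.lowercased())
    }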

+ 224 - 0
deltachat-ios/MessageKit/Controllers/BasicAudioController.swift

@@ -0,0 +1,224 @@
+/*
+ MIT License
+
+ Copyright (c) 2017-2019 MessageKit
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+ */
+
+import UIKit
+import AVFoundation
+
+/// The `PlayerState` indicates the current audio controller state
+public enum PlayerState {
+
+    /// The audio controller is currently playing a sound
+    case playing
+
+    /// The audio controller is currently in pause state
+    case pause
+
+    /// The audio controller is not playing any sound and audioPlayer is nil
+    case stopped
+}
+
+/// The `BasicAudioController` updates the UI of the audio cell that is currently playing a sound
+/// and also creates and manages the `AVAudioPlayer` states: play, pause and stop.
+open class BasicAudioController: NSObject, AVAudioPlayerDelegate {
+
+    /// The `AVAudioPlayer` that is playing the sound
+    open var audioPlayer: AVAudioPlayer?
+
+    /// The `AudioMessageCell` that is currently playing sound
+    open weak var playingCell: AudioMessageCell?
+
+    /// The `MessageType` that is currently playing sound
+    open var playingMessage: MessageType?
+
+    /// The current audio controller state: playing, paused or stopped
+    open private(set) var state: PlayerState = .stopped
+
+    /// The `MessagesCollectionView` where the playing cell exists
+    public weak var messageCollectionView: MessagesCollectionView?
+
+    /// The `Timer` that updates the playing progress
+    internal var progressTimer: Timer?
+
+    // MARK: - Init Methods
+
+    public init(messageCollectionView: MessagesCollectionView) {
+        self.messageCollectionView = messageCollectionView
+        super.init()
+    }
+
+    // MARK: - Methods
+
+    /// Used to configure the audio cell UI:
+    ///     1. play button selected state;
+    ///     2. progressView progress;
+    ///     3. durationLabel text;
+    ///
+    /// - Parameters:
+    ///   - cell: The `AudioMessageCell` that needs to be configured.
+    ///   - message: The `MessageType` that configures the cell.
+    ///
+    /// - Note:
+    ///   This protocol method is called by MessageKit every time an audio cell needs to be configured.
+    open func configureAudioCell(_ cell: AudioMessageCell, message: MessageType) {
+        if playingMessage?.messageId == message.messageId, let collectionView = messageCollectionView, let player = audioPlayer {
+            playingCell = cell
+            cell.audioPlayerView.setProgress((player.duration == 0) ? 0 : Float(player.currentTime/player.duration))
+            cell.audioPlayerView.showPlayLayout((player.isPlaying == true) ? true : false)
+            guard let displayDelegate = collectionView.messagesDisplayDelegate else {
+                fatalError("MessagesDisplayDelegate has not been set.")
+            }
+            cell.audioPlayerView.setDuration(formattedText: displayDelegate.audioProgressTextFormat(Float(player.currentTime),
+                                                                                                    for: cell,
+                                                                                                    in: collectionView))
+        }
+    }
+
+    /// Used to start playing an audio sound
+    ///
+    /// - Parameters:
+    ///   - message: The `MessageType` that contains the audio item to be played.
+    ///   - audioCell: The `AudioMessageCell` that needs to be updated while audio is playing.
+    open func playSound(for message: MessageType, in audioCell: AudioMessageCell) {
+        switch message.kind {
+        case .audio(let item):
+            playingCell = audioCell
+            playingMessage = message
+            guard let player = try? AVAudioPlayer(contentsOf: item.url) else {
+                print("Failed to create audio player for URL: \(item.url)")
+                return
+            }
+            audioPlayer = player
+            audioPlayer?.prepareToPlay()
+            audioPlayer?.delegate = self
+            audioPlayer?.play()
+            state = .playing
+            audioCell.audioPlayerView.showPlayLayout(true)  // show pause button on audio cell
+            startProgressTimer()
+            audioCell.delegate?.didStartAudio(in: audioCell)
+        default:
+            print("BasicAudioPlayer failed play sound becasue given message kind is not Audio")
+        }
+    }
+
+    /// Used to pause the audio sound
+    ///
+    /// - Parameters:
+    ///   - message: The `MessageType` that contains the audio item to be paused.
+    ///   - audioCell: The `AudioMessageCell` that needs to be updated by the pause action.
+    open func pauseSound(for message: MessageType, in audioCell: AudioMessageCell) {
+        audioPlayer?.pause()
+        state = .pause
+        audioCell.audioPlayerView.showPlayLayout(false) // show play button on audio cell
+        progressTimer?.invalidate()
+        if let cell = playingCell {
+            cell.delegate?.didPauseAudio(in: cell)
+        }
+    }
+
+    /// Stops any ongoing audio playback if one exists
+    open func stopAnyOngoingPlaying() {
+        // If the audio player is nil then we don't need to go through the stopping logic
+        guard let player = audioPlayer, let collectionView = messageCollectionView else { return }
+        player.stop()
+        state = .stopped
+        if let cell = playingCell {
+            cell.audioPlayerView.setProgress(0.0)
+            cell.audioPlayerView.showPlayLayout(false)
+            guard let displayDelegate = collectionView.messagesDisplayDelegate else {
+                fatalError("MessagesDisplayDelegate has not been set.")
+            }
+            cell.audioPlayerView.setDuration(formattedText: displayDelegate.audioProgressTextFormat(Float(player.duration),
+                                                                                                    for: cell,
+                                                                                                    in: collectionView))
+            cell.delegate?.didStopAudio(in: cell)
+        }
+        progressTimer?.invalidate()
+        progressTimer = nil
+        audioPlayer = nil
+        playingMessage = nil
+        playingCell = nil
+    }
+
+    /// Resumes a currently paused audio sound
+    open func resumeSound() {
+        guard let player = audioPlayer, let cell = playingCell else {
+            stopAnyOngoingPlaying()
+            return
+        }
+        player.prepareToPlay()
+        player.play()
+        state = .playing
+        startProgressTimer()
+        cell.audioPlayerView.showPlayLayout(true) // show pause button on audio cell
+        cell.delegate?.didStartAudio(in: cell)
+    }
+
+    // MARK: - Fire Methods
+    @objc private func didFireProgressTimer(_ timer: Timer) {
+        guard let player = audioPlayer, let collectionView = messageCollectionView, let cell = playingCell else {
+            return
+        }
+        // check if can update playing cell
+        if let playingCellIndexPath = collectionView.indexPath(for: cell) {
+            // 1. get the current message that decorates the playing cell
+            // 2. check if current message is the same with playing message, if so then update the cell content
+            // Note: Those messages differ in the case of cell reuse
+            let currentMessage = collectionView.messagesDataSource?.messageForItem(at: playingCellIndexPath, in: collectionView)
+            if currentMessage != nil && currentMessage?.messageId == playingMessage?.messageId {
+                // messages are the same update cell content
+                cell.audioPlayerView.setProgress((player.duration == 0) ? 0 : Float(player.currentTime/player.duration))
+                guard let displayDelegate = collectionView.messagesDisplayDelegate else {
+                    fatalError("MessagesDisplayDelegate has not been set.")
+                }
+                cell.audioPlayerView.setDuration(formattedText: displayDelegate.audioProgressTextFormat(Float(player.currentTime),
+                                                                                                        for: cell,
+                                                                                                        in: collectionView))
+            } else {
+                // if the current message is not the same with playing message stop playing sound
+                stopAnyOngoingPlaying()
+            }
+        }
+    }
+
+    // MARK: - Private Methods
+    private func startProgressTimer() {
+        progressTimer?.invalidate()
+        progressTimer = nil
+        progressTimer = Timer.scheduledTimer(timeInterval: 0.1,
+                                             target: self,
+                                             selector: #selector(BasicAudioController.didFireProgressTimer(_:)),
+                                             userInfo: nil,
+                                             repeats: true)
+    }
+
+    // MARK: - AVAudioPlayerDelegate
+    open func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
+        stopAnyOngoingPlaying()
+    }
+
+    open func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
+        stopAnyOngoingPlaying()
+    }
+
+}
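configureAudioCell(_:message:) is meant to be called from the MessagesDisplayDelegate so that a reused cell picks up the progress of the currently playing message again, and audioProgressTextFormat supplies the text for the duration label. A minimal sketch of that wiring, assuming ChatViewController acts as the display delegate (its actual implementation is not part of this diff):

    extension ChatViewController: MessagesDisplayDelegate {
        func configureAudioCell(_ cell: AudioMessageCell, message: MessageType) {
            // Let the shared controller restore play/pause state, progress and duration label.
            audioController.configureAudioCell(cell, message: message)
        }

        func audioProgressTextFormat(_ duration: Float,
                                     for audioCell: AudioMessageCell,
                                     in messageCollectionView: MessagesCollectionView) -> String {
            // Format seconds as m:ss for the duration label.
            let seconds = Int(duration)
            return String(format: "%d:%02d", seconds / 60, seconds % 60)
        }
    }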

+ 50 - 4
deltachat-ios/MessageKit/Layout/AudioMessageSizeCalculator.swift

@@ -27,18 +27,64 @@ import UIKit
 
 open class AudioMessageSizeCalculator: MessageSizeCalculator {
 
+    public var incomingMessageLabelInsets = UIEdgeInsets(top: AudioMessageCell.insetTop,
+                                                         left: AudioMessageCell.insetHorizontalBig,
+                                                         bottom: AudioMessageCell.insetBottom,
+                                                         right: AudioMessageCell.insetHorizontalSmall)
+    public var outgoingMessageLabelInsets = UIEdgeInsets(top: AudioMessageCell.insetTop,
+                                                         left: AudioMessageCell.insetHorizontalSmall,
+                                                         bottom: AudioMessageCell.insetBottom,
+                                                         right: AudioMessageCell.insetHorizontalBig)
+
+    public var messageLabelFont = UIFont.preferredFont(forTextStyle: .body)
+
+
+    internal func messageLabelInsets(for message: MessageType) -> UIEdgeInsets {
+        let dataSource = messagesLayout.messagesDataSource
+        let isFromCurrentSender = dataSource.isFromCurrentSender(message: message)
+        return isFromCurrentSender ? outgoingMessageLabelInsets : incomingMessageLabelInsets
+    }
+
     open override func messageContainerSize(for message: MessageType) -> CGSize {
         switch message.kind {
         case .audio(let item):
             let maxWidth = messageContainerMaxWidth(for: message)
-            if maxWidth < item.size.width {
+            var itemWidth = item.size.width
+            var itemHeight = item.size.height
+
+            if maxWidth < itemWidth {
                 // Maintain the ratio if width is too great
-                let height = maxWidth * item.size.height / item.size.width
-                return CGSize(width: maxWidth, height: height)
+                itemHeight = maxWidth * item.size.height / item.size.width
+                itemWidth = maxWidth
+            }
+
+            let maxTextWidth = itemWidth - self.messageLabelInsets(for: message).horizontal
+            var messageContainerSize = CGSize(width: itemWidth, height: itemHeight)
+            if let text = item.text {
+                let textHeight = text.height(withConstrainedWidth: maxTextWidth)
+                messageContainerSize.height += textHeight
+                messageContainerSize.height +=  self.messageLabelInsets(for: message).vertical
             }
-            return item.size
+            return messageContainerSize
         default:
             fatalError("messageContainerSize received unhandled MessageDataType: \(message.kind)")
         }
     }
+
+    open override func configure(attributes: UICollectionViewLayoutAttributes) {
+        super.configure(attributes: attributes)
+        guard let attributes = attributes as? MessagesCollectionViewLayoutAttributes else { return }
+
+        let dataSource = messagesLayout.messagesDataSource
+        let indexPath = attributes.indexPath
+        let message = dataSource.messageForItem(at: indexPath, in: messagesLayout.messagesCollectionView)
+
+        switch message.kind {
+        case .audio:
+            attributes.messageLabelInsets = messageLabelInsets(for: message)
+            attributes.messageLabelFont = messageLabelFont
+        default:
+            break
+        }
+    }
 }
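For orientation, with the default Audio size of 250x50 (see Model/Audio.swift below) and the AudioMessageCell insets, the size calculator above yields roughly the following for an incoming audio message with a caption (illustration only):

    // item.size                  = 250 x 50
    // incomingMessageLabelInsets = top 12, left 23, bottom 12, right 12  (vertical 24, horizontal 35)
    // maxTextWidth               = 250 - 35 = 215
    // container width            = 250 (shrunk proportionally if it exceeds the layout's max width)
    // container height           = 50 + caption.height(withConstrainedWidth: 215) + 24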

+ 2 - 0
deltachat-ios/MessageKit/Protocols/AudioItem.swift

@@ -37,4 +37,6 @@ public protocol AudioItem {
     /// The size of the audio item.
     var size: CGSize { get }
 
+    /// Additional text
+    var text: NSAttributedString? { get }
 }

+ 118 - 0
deltachat-ios/MessageKit/Views/AudioPlayerView.swift

@@ -0,0 +1,118 @@
+//
+//  AudioPlayerView.swift
+//  deltachat-ios
+//
+//  Created by Macci on 10.10.19.
+//  Copyright © 2019 Jonas Reinsch. All rights reserved.
+//
+
+import Foundation
+import UIKit
+
+open class AudioPlayerView: UIView {
+    //open weak var playerDelegate: AudioPlayerDelegate?
+
+    /// The play button view to display on audio messages.
+    private lazy var playButton: UIButton = {
+        let playButton = UIButton(type: .custom)
+        let playImage = UIImage(named: "play")
+        let pauseImage = UIImage(named: "pause")
+        playButton.setImage(playImage?.withRenderingMode(.alwaysTemplate), for: .normal)
+        playButton.setImage(pauseImage?.withRenderingMode(.alwaysTemplate), for: .selected)
+        playButton.translatesAutoresizingMaskIntoConstraints = false
+        return playButton
+    }()
+
+    /// The time duration label to display on audio messages.
+    private lazy var durationLabel: UILabel = {
+        let durationLabel = UILabel(frame: CGRect.zero)
+        durationLabel.textAlignment = .right
+        durationLabel.font = UIFont.systemFont(ofSize: 14)
+        durationLabel.text = "0:00"
+        durationLabel.translatesAutoresizingMaskIntoConstraints = false
+        return durationLabel
+    }()
+
+    private lazy var progressView: UIProgressView = {
+        let progressView = UIProgressView(progressViewStyle: .default)
+        progressView.progress = 0.0
+        progressView.translatesAutoresizingMaskIntoConstraints = false
+        return progressView
+    }()
+
+    public override init(frame: CGRect) {
+        super.init(frame: frame)
+        setupSubviews()
+    }
+
+    public required init?(coder aDecoder: NSCoder) {
+        super.init(coder: aDecoder)
+        self.translatesAutoresizingMaskIntoConstraints = false
+        setupSubviews()
+    }
+
+    /// Responsible for setting up the constraints of the cell's subviews.
+    open func setupConstraints() {
+        playButton.constraint(equalTo: CGSize(width: 35, height: 35))
+
+        let playButtonConstraints = [playButton.constraintCenterYTo(self),
+                                     playButton.constraintAlignLeadingTo(self, paddingLeading: 12)]
+        let durationLabelConstraints = [durationLabel.constraintAlignTrailingTo(self, paddingTrailing: 12),
+                                        durationLabel.constraintCenterYTo(self)]
+        self.addConstraints(playButtonConstraints)
+        self.addConstraints(durationLabelConstraints)
+
+        progressView.addConstraints(left: playButton.rightAnchor,
+                                    right: durationLabel.leftAnchor,
+                                    centerY: self.centerYAnchor,
+                                    leftConstant: 8,
+                                    rightConstant: 8)
+    }
+
+    open func setupSubviews() {
+        self.addSubview(playButton)
+        self.addSubview(durationLabel)
+        self.addSubview(progressView)
+        setupConstraints()
+    }
+
+    open func reset() {
+        progressView.progress = 0
+        playButton.isSelected = false
+        durationLabel.text = "0:00"
+    }
+
+    open func didTapPlayButton(_ gesture: UIGestureRecognizer) -> Bool {
+        let touchLocation = gesture.location(in: self)
+        // compute play button touch area, currently play button size is (25, 25) which is hardly touchable
+        // add 10 px around current button frame and test the touch against this new frame
+        let playButtonTouchArea = CGRect(playButton.frame.origin.x - 10.0,
+                                         playButton.frame.origin.y - 10,
+                                         playButton.frame.size.width + 20,
+                                         playButton.frame.size.height + 20)
+        let translateTouchLocation = convert(touchLocation, to: self)
+        if playButtonTouchArea.contains(translateTouchLocation) {
+            return true
+        } else {
+            return false
+        }
+    }
+
+    open func setTintColor(_ color: UIColor) {
+        playButton.imageView?.tintColor = color
+        durationLabel.textColor = color
+        progressView.tintColor = color
+    }
+
+    open func setProgress(_ progress: Float) {
+        progressView.progress = progress
+    }
+
+    open func setDuration(formattedText: String) {
+        durationLabel.text = formattedText
+    }
+
+    open func showPlayLayout(_ play: Bool) {
+        playButton.isSelected = play
+    }
+}

+ 88 - 64
deltachat-ios/MessageKit/Views/Cells/AudioMessageCell.swift

@@ -28,71 +28,72 @@ import AVFoundation
 /// A subclass of `MessageContentCell` used to display video and audio messages.
 open class AudioMessageCell: MessageContentCell {
 
-    /// The play button view to display on audio messages.
-    public lazy var playButton: UIButton = {
-        let playButton = UIButton(type: .custom)
-        let playImage = UIImage.messageKitImageWith(type: .play)
-        let pauseImage = UIImage.messageKitImageWith(type: .pause)
-        playButton.setImage(playImage?.withRenderingMode(.alwaysTemplate), for: .normal)
-        playButton.setImage(pauseImage?.withRenderingMode(.alwaysTemplate), for: .selected)
-        return playButton
-    }()
+    public static let insetTop: CGFloat = 12
+    public static let insetBottom: CGFloat = 12
+    public static let insetHorizontalBig: CGFloat = 23
+    public static let insetHorizontalSmall: CGFloat = 12
+
+    // MARK: - Properties
+    /// The `MessageCellDelegate` for the cell.
+    open override weak var delegate: MessageCellDelegate? {
+        didSet {
+            messageLabel.delegate = delegate
+        }
+    }
 
-    /// The time duration lable to display on audio messages.
-    public lazy var durationLabel: UILabel = {
-        let durationLabel = UILabel(frame: CGRect.zero)
-        durationLabel.textAlignment = .right
-        durationLabel.font = UIFont.systemFont(ofSize: 14)
-        durationLabel.text = "0:00"
-        return durationLabel
-    }()
+    /// The label used to display the message's text.
+    open var messageLabel = MessageLabel()
 
-    public lazy var progressView: UIProgressView = {
-        let progressView = UIProgressView(progressViewStyle: .default)
-        progressView.progress = 0.0
-        return progressView
+    public lazy var audioPlayerView: AudioPlayerView = {
+        let audioPlayerView = AudioPlayerView()
+        audioPlayerView.translatesAutoresizingMaskIntoConstraints = false
+        return audioPlayerView
     }()
 
     // MARK: - Methods
-
     /// Responsible for setting up the constraints of the cell's subviews.
     open func setupConstraints() {
-        playButton.constraint(equalTo: CGSize(width: 25, height: 25))
-        playButton.addConstraints(left: messageContainerView.leftAnchor, centerY: messageContainerView.centerYAnchor, leftConstant: 5)
-        durationLabel.addConstraints(right: messageContainerView.rightAnchor, centerY: messageContainerView.centerYAnchor, rightConstant: 15)
-        progressView.addConstraints(left: playButton.rightAnchor,
-                                    right: durationLabel.leftAnchor,
-                                    centerY: messageContainerView.centerYAnchor,
-                                    leftConstant: 5,
-                                    rightConstant: 5)
+        messageContainerView.removeConstraints(messageContainerView.constraints)
+        let audioPlayerHeight = messageContainerView.frame.height - getMessageLabelHeight()
+        let audioPlayerConstraints = [ audioPlayerView.constraintHeightTo(audioPlayerHeight),
+                                       audioPlayerView.constraintAlignLeadingTo(messageContainerView),
+                                       audioPlayerView.constraintAlignTrailingTo(messageContainerView),
+                                       audioPlayerView.constraintAlignTopTo(messageContainerView)
+        ]
+        messageContainerView.addConstraints(audioPlayerConstraints)
+
+        messageLabel.frame = CGRect(x: 0,
+                                    y: messageContainerView.frame.height - getMessageLabelHeight(),
+                                    width: messageContainerView.frame.width,
+                                    height: getMessageLabelHeight())
+    }
+
+    func getMessageLabelHeight() -> CGFloat {
+        if let text = messageLabel.attributedText {
+            let height = (text.height(withConstrainedWidth:
+                messageContainerView.frame.width -
+                    TextMediaMessageCell.insetHorizontalSmall -
+                    TextMediaMessageCell.insetHorizontalBig))
+            return height + TextMediaMessageCell.insetBottom + TextMediaMessageCell.insetTop
+        }
+        return 0
     }
 
     open override func setupSubviews() {
         super.setupSubviews()
-        messageContainerView.addSubview(playButton)
-        messageContainerView.addSubview(durationLabel)
-        messageContainerView.addSubview(progressView)
-        setupConstraints()
+        messageContainerView.addSubview(audioPlayerView)
+        messageContainerView.addSubview(messageLabel)
     }
 
     open override func prepareForReuse() {
         super.prepareForReuse()
-        progressView.progress = 0
-        playButton.isSelected = false
-        durationLabel.text = "0:00"
+        audioPlayerView.reset()
+        messageLabel.attributedText = nil
     }
 
     /// Handle tap gesture on contentView and its subviews.
     open override func handleTapGesture(_ gesture: UIGestureRecognizer) {
-        let touchLocation = gesture.location(in: self)
-        // compute play button touch area, currently play button size is (25, 25) which is hardly touchable
-        // add 10 px around current button frame and test the touch against this new frame
-        let playButtonTouchArea = CGRect(playButton.frame.origin.x - 10.0,
-                                         playButton.frame.origin.y - 10,
-                                         playButton.frame.size.width + 20,
-                                         playButton.frame.size.height + 20)
-        let translateTouchLocation = convert(touchLocation, to: messageContainerView)
-        if playButtonTouchArea.contains(translateTouchLocation) {
+        if audioPlayerView.didTapPlayButton(gesture) {
             delegate?.didTapPlayButton(in: self)
         } else {
             super.handleTapGesture(gesture)
@@ -100,38 +101,61 @@ open class AudioMessageCell: MessageContentCell {
     }
 
     // MARK: - Configure Cell
+    open override func apply(_ layoutAttributes: UICollectionViewLayoutAttributes) {
+        super.apply(layoutAttributes)
+        if let attributes = layoutAttributes as? MessagesCollectionViewLayoutAttributes {
+            messageLabel.textInsets = attributes.messageLabelInsets
+            messageLabel.messageLabelFont = attributes.messageLabelFont
+        }
+    }
 
     open override func configure(with message: MessageType, at indexPath: IndexPath, and messagesCollectionView: MessagesCollectionView) {
         super.configure(with: message, at: indexPath, and: messagesCollectionView)
 
-        guard let dataSource = messagesCollectionView.messagesDataSource else {
+        guard messagesCollectionView.messagesDataSource != nil else {
             fatalError(MessageKitError.nilMessagesDataSource)
         }
 
-        let playButtonLeftConstraint = messageContainerView.constraints.filter { $0.identifier == "left" }.first
-        let durationLabelRightConstraint = messageContainerView.constraints.filter { $0.identifier == "right" }.first
-
-        if !dataSource.isFromCurrentSender(message: message) {
-            playButtonLeftConstraint?.constant = 12
-            durationLabelRightConstraint?.constant = -8
-        } else {
-            playButtonLeftConstraint?.constant = 5
-            durationLabelRightConstraint?.constant = -15
-        }
-
         guard let displayDelegate = messagesCollectionView.messagesDisplayDelegate else {
             fatalError(MessageKitError.nilMessagesDisplayDelegate)
         }
 
         let tintColor = displayDelegate.audioTintColor(for: message, at: indexPath, in: messagesCollectionView)
-        playButton.imageView?.tintColor = tintColor
-        durationLabel.textColor = tintColor
-        progressView.tintColor = tintColor
-
-        displayDelegate.configureAudioCell(self, message: message)
+        audioPlayerView.setTintColor(tintColor)
 
         if case let .audio(audioItem) = message.kind {
-            durationLabel.text = displayDelegate.audioProgressTextFormat(audioItem.duration, for: self, in: messagesCollectionView)
+            audioPlayerView.setDuration(formattedText: displayDelegate.audioProgressTextFormat(audioItem.duration,
+                                                                                               for: self,
+                                                                                               in: messagesCollectionView))
+            configureMessageLabel(for: audioItem,
+                                  with: displayDelegate,
+                                  message: message,
+                                  at: indexPath, in: messagesCollectionView)
         }
+
+        setupConstraints()
+        displayDelegate.configureAudioCell(self, message: message)
+    }
+
+    func configureMessageLabel(for audioItem: AudioItem,
+                               with displayDelegate: MessagesDisplayDelegate,
+                               message: MessageType,
+                               at indexPath: IndexPath,
+                               in messagesCollectionView: MessagesCollectionView) {
+        let enabledDetectors = displayDelegate.enabledDetectors(for: message, at: indexPath, in: messagesCollectionView)
+        messageLabel.configure {
+            messageLabel.enabledDetectors = enabledDetectors
+            for detector in enabledDetectors {
+                let attributes = displayDelegate.detectorAttributes(for: detector, and: message, at: indexPath)
+                messageLabel.setAttributes(attributes, detector: detector)
+            }
+            messageLabel.attributedText = audioItem.text
+        }
+    }
+
+    /// Used to handle the cell's contentView's tap gesture.
+    /// Return false when the contentView does not need to handle the gesture.
+    open override func cellContentView(canHandle touchPoint: CGPoint) -> Bool {
+        return messageLabel.handleGesture(touchPoint)
     }
 }

+ 1 - 1
deltachat-ios/MessageKit/Views/Cells/ContactMessageCell.swift

@@ -56,7 +56,7 @@ open class ContactMessageCell: MessageContentCell {
     
     /// The disclouser image view
     public lazy var disclosureImageView: UIImageView = {
-        let disclouserImage = UIImage.messageKitImageWith(type: .disclouser)?.withRenderingMode(.alwaysTemplate)
+        let disclouserImage = UIImage(named: "disclouser")?.withRenderingMode(.alwaysTemplate)
         let disclouser = UIImageView(image: disclouserImage)
         return disclouser
     }()

+ 19 - 0
deltachat-ios/Model/Audio.swift

@@ -0,0 +1,19 @@
+import CoreLocation
+import Foundation
+import UIKit
+
+struct Audio: AudioItem {
+    var size: CGSize = CGSize(width: 250, height: 50)
+
+    var url: URL
+
+    var duration: Float
+
+    var text: NSAttributedString?
+
+    init(url: URL, duration: Float, text: NSAttributedString? = nil) {
+        self.url = url
+        self.duration = duration
+        self.text = text
+    }
+}
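For reference, constructing the new Audio item mirrors what DcMsg.createAudioMessage does in Wrapper.swift above; a minimal sketch with placeholder values:

    // Build an audio message payload with an optional caption (placeholder URL and text).
    let url = URL(fileURLWithPath: "/path/to/voice-message.wav")
    let caption = NSAttributedString(string: "Listen to this!",
                                     attributes: [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 16.0)])
    let item = Audio(url: url, duration: 42.0, text: caption)   // defaults to a 250 x 50 bubble
    let kind = MessageKind.audio(item)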