//
//  QMChatRoomMp3Cell.m
//  IMSDK-OC
//
//  Created by lishuijiao on 2018/4/23.
//  Copyright © 2018年 HCF. All rights reserved.
//
  8. #import "QMChatRoomMp3Cell.h"
  9. #import <AVFoundation/AVFoundation.h>
  10. #import <QMLineSDK/QMLineSDK.h>
  11. #import "QMAudioPlayer.h"
  12. #import "QMAudioAnimation.h"
  13. @interface QMChatRoomMp3Cell() <AVAudioPlayerDelegate>
  14. @end
  15. @implementation QMChatRoomMp3Cell
  16. {
  17. UIImageView *_voicePlayImageView;
  18. UILabel *_secondsLabel;
  19. AVAudioSession *_audioSession;
  20. NSString *_messageId;
  21. UIImageView *_badgeView;
  22. }
  23. - (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier {
  24. self = [super initWithStyle:style reuseIdentifier:reuseIdentifier];
  25. if (self) {
  26. [self createUI];
  27. }
  28. return self;
  29. }
  30. - (void)createUI {
  31. _voicePlayImageView = [[UIImageView alloc] init];
  32. _voicePlayImageView.animationDuration = 1.0;
  33. [self.chatBackgroudImage addSubview:_voicePlayImageView];
  34. _secondsLabel = [[UILabel alloc] init];
  35. _secondsLabel.backgroundColor = [UIColor clearColor];
  36. _secondsLabel.font = [UIFont systemFontOfSize:16];
  37. [self.chatBackgroudImage addSubview:_secondsLabel];
  38. _badgeView = [[UIImageView alloc] init];
  39. _badgeView.backgroundColor = [UIColor redColor];
  40. _badgeView.layer.cornerRadius = 4;
  41. _badgeView.layer.masksToBounds = YES;
  42. [_badgeView setHidden:YES];
  43. [self.contentView addSubview:_badgeView];
  44. _audioSession = [AVAudioSession sharedInstance];
  45. UILongPressGestureRecognizer * longPressGesture = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longPressTapGesture:)];
  46. [_voicePlayImageView addGestureRecognizer:longPressGesture];
  47. // 默认为听筒
  48. [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
  49. }
  50. - (void)setData:(CustomMessage *)message avater:(NSString *)avater {
  51. _messageId = message._id;
  52. self.message = message;
  53. [super setData:message avater:avater];
  54. UITapGestureRecognizer * tapPressGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapPressGesture:)];
  55. [self.chatBackgroudImage addGestureRecognizer:tapPressGesture];
  56. NSString *playUrl = [NSString stringWithFormat:@"%@/%@/%@", NSHomeDirectory(), @"Documents", [NSString stringWithFormat:@"%@", self.message._id]];
  57. dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
  58. dispatch_async(queue, ^{
  59. [self downloadFile:playUrl];
  60. });
  61. if ([message.fromType isEqualToString:@"0"]) {
  62. self.chatBackgroudImage.frame = CGRectMake(CGRectGetMinX(self.iconImage.frame)-10-125, CGRectGetMaxY(self.timeLabel.frame)+10, 125, 40);
  63. self.sendStatus.frame = CGRectMake(CGRectGetMinX(self.chatBackgroudImage.frame)-25, CGRectGetMaxY(self.chatBackgroudImage.frame)-32, 20, 20);
  64. _voicePlayImageView.frame = CGRectMake(125-35, 11, 13, 17);
  65. _voicePlayImageView.image = [UIImage imageNamed:@"SenderVoiceNodePlaying"];
  66. if ([message.status isEqualToString:@"0"]) {
  67. _secondsLabel.textColor = [UIColor whiteColor];
  68. _secondsLabel.frame = CGRectMake(CGRectGetMinX(_voicePlayImageView.frame)-50, 11, 40, 20);
  69. _secondsLabel.text = [NSString stringWithFormat:@"%@''",message.recordSeconds];
  70. _secondsLabel.textAlignment = NSTextAlignmentRight;
  71. _secondsLabel.hidden = NO;
  72. }else {
  73. _secondsLabel.hidden = YES;
  74. }
  75. self.sendStatus.frame = CGRectMake(CGRectGetMinX(self.chatBackgroudImage.frame)-25, CGRectGetMinY(self.chatBackgroudImage.frame)+10, 20, 20);
  76. [[QMAudioAnimation sharedInstance]setAudioAnimationPlay:YES and:_voicePlayImageView];
  77. [_badgeView setHidden:YES];
  78. }else {
  79. self.chatBackgroudImage.frame = CGRectMake(CGRectGetMaxX(self.iconImage.frame)+5, CGRectGetMaxY(self.timeLabel.frame)+10, 125, 40);
  80. _badgeView.frame = CGRectMake(CGRectGetMaxX(self.chatBackgroudImage.frame)+5, CGRectGetMaxY(self.timeLabel.frame)+15, 8, 8);
  81. _voicePlayImageView.frame = CGRectMake(22, 11, 13, 17);
  82. _voicePlayImageView.image = [UIImage imageNamed:@"ReceiverVoiceNodePlaying"];
  83. _secondsLabel.textColor = [UIColor blackColor];
  84. _secondsLabel.frame = CGRectMake(CGRectGetMaxX(_voicePlayImageView.frame)+10, 11, 40, 20);
  85. _secondsLabel.text = [NSString stringWithFormat:@"%@''",message.recordSeconds ? message.recordSeconds : 0];
  86. _secondsLabel.textAlignment = NSTextAlignmentLeft;
  87. [[QMAudioAnimation sharedInstance]setAudioAnimationPlay:NO and:_voicePlayImageView];
  88. CustomMessage *msg = [QMConnect getOneDataFromDatabase:message._id].firstObject;
  89. if ([msg.isRead isEqualToString:@"1"]) {
  90. [_badgeView setHidden:YES];
  91. }else {
  92. [_badgeView setHidden:NO];
  93. }
  94. }
  95. NSString *fileName;
  96. if ([self existFile:self.message.message]) {
  97. fileName = self.message.message;
  98. }else {
  99. fileName = self.message._id;
  100. }
  101. if ([[QMAudioPlayer sharedInstance] isPlaying:fileName] == true) {
  102. [[QMAudioAnimation sharedInstance]startAudioAnimation:_voicePlayImageView];
  103. }
  104. }
  105. - (void)longPressTapGesture:(UILongPressGestureRecognizer *)sender {
  106. if (sender.state == UIGestureRecognizerStateBegan) {
  107. [self becomeFirstResponder];
  108. UIMenuController *menu = [UIMenuController sharedMenuController];
  109. UIMenuItem *reciverMenu = [[UIMenuItem alloc] initWithTitle:NSLocalizedString(@"button.receiver", nil) action:@selector(reciverMenu:)];
  110. UIMenuItem *speakerMenu = [[UIMenuItem alloc] initWithTitle:NSLocalizedString(@"button.speaker", nil) action:@selector(speakerMenu:)];
  111. UIMenuItem *removeMenu = [[UIMenuItem alloc] initWithTitle:NSLocalizedString(@"button.delete", nil) action:@selector(removeMenu:)];
  112. [menu setMenuItems:[NSArray arrayWithObjects:reciverMenu,speakerMenu,removeMenu, nil]];
  113. [menu setTargetRect:self.chatBackgroudImage.frame inView:self];
  114. [menu setMenuVisible:true animated:true];
  115. UIWindow *window = [[[UIApplication sharedApplication] delegate] window];
  116. if ([window isKeyWindow] == NO) {
  117. [window becomeKeyWindow];
  118. [window makeKeyAndVisible];
  119. }
  120. }
  121. }
  122. - (BOOL)canBecomeFirstResponder {
  123. return YES;
  124. }
  125. - (BOOL)canPerformAction:(SEL)action withSender:(id)sender {
  126. if (action == @selector(reciverMenu:) || action == @selector(speakerMenu:) || action == @selector(removeMenu:)) {
  127. return YES;
  128. }else {
  129. return NO;
  130. }
  131. }
  132. - (void)reciverMenu:(id)sendr {
  133. //听筒
  134. NSError *error = nil;
  135. if ([[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]) {
  136. }
  137. }
  138. - (void)speakerMenu:(id)sender {
  139. // 扬声器
  140. NSError *error = nil;
  141. [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&error];
  142. }
  143. - (void)removeMenu:(id)sender {
  144. // 删除语音(只能删除本地数据库消息)
  145. // 删除文本消息
  146. UIAlertController *alertController = [UIAlertController alertControllerWithTitle:NSLocalizedString(@"title.prompt", nil) message:NSLocalizedString(@"title.statement", nil) preferredStyle:UIAlertControllerStyleAlert];
  147. UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString(@"button.cancel", nil) style:UIAlertActionStyleCancel handler:^(UIAlertAction * _Nonnull action) {
  148. }];
  149. UIAlertAction *sureAction = [UIAlertAction actionWithTitle:NSLocalizedString(@"button.sure", nil) style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
  150. [QMConnect removeDataFromDataBase:_messageId];
  151. [[NSNotificationCenter defaultCenter] postNotificationName:CHATMSG_RELOAD object:nil];
  152. }];
  153. [alertController addAction:cancelAction];
  154. [alertController addAction:sureAction];
  155. [[UIApplication sharedApplication].keyWindow.rootViewController presentViewController:alertController animated:YES completion:nil];
  156. }
  157. - (void)tapPressGesture:(id)sender {
  158. NSLog(@"点击语音消息");
  159. [_badgeView setHidden:YES];
  160. [QMConnect changeAudioMessageStatus:_messageId];
  161. [[QMAudioAnimation sharedInstance] stopAudioAnimation:nil];
  162. [[QMAudioAnimation sharedInstance] startAudioAnimation:_voicePlayImageView];
  163. dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)((_secondsLabel.text).intValue * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
  164. [[QMAudioAnimation sharedInstance] stopAudioAnimation:_voicePlayImageView];
  165. });
  166. NSString *fileName;
  167. if ([self existFile:self.message.message]) {
  168. fileName = self.message.message;
  169. }else if ([self existFile:[NSString stringWithFormat:@"%@", self.message._id]]) {
  170. fileName = self.message._id;
  171. }else {
  172. NSString *playUrl = [NSString stringWithFormat:@"%@/%@/%@", NSHomeDirectory(), @"Documents", [NSString stringWithFormat:@"%@", self.message._id]];
  173. fileName = self.message._id;
  174. NSURL *fileUrl = [NSURL URLWithString:self.message.remoteFilePath];
  175. NSData *data = [NSData dataWithContentsOfURL:fileUrl];
  176. [data writeToFile:playUrl atomically:YES];
  177. }
  178. [[QMAudioPlayer sharedInstance] startAudioPlayer:fileName withDelegate:self];
  179. }
  180. - (BOOL)existFile: (NSString *)name {
  181. NSString * filePath = [NSString stringWithFormat:@"%@/%@/%@", NSHomeDirectory(), @"Documents", name];
  182. NSFileManager *fileManager = [NSFileManager defaultManager];
  183. if ([fileManager fileExistsAtPath:filePath]) {
  184. return YES;
  185. }else {
  186. return NO;
  187. }
  188. }
  189. - (void)downloadFile: (NSString *)fileStr {
  190. NSString *timeStr = [QMConnect queryMp3FileMessageSize:self.message._id];
  191. if ([timeStr isEqualToString:@"0"]) {
  192. NSString *str = [self.message.remoteFilePath stringByAddingPercentEncodingWithAllowedCharacters:NSCharacterSet.URLQueryAllowedCharacterSet];
  193. NSURL *fileUrl = [NSURL URLWithString:str];
  194. NSData *data = [NSData dataWithContentsOfURL:fileUrl];
  195. [data writeToFile:fileStr atomically:YES];
  196. float aaa = [self fileAllTime:str];
  197. dispatch_async(dispatch_get_main_queue(), ^{
  198. _secondsLabel.text = [NSString stringWithFormat:@"%d", (int)aaa];
  199. [QMConnect changeMp3FileMessageSize:self.message._id fileSize:[NSString stringWithFormat:@"%d", (int)aaa]];
  200. });
  201. }else {
  202. dispatch_sync(dispatch_get_main_queue(), ^{
  203. _secondsLabel.text = timeStr;
  204. });
  205. }
  206. }
  207. - (float)fileAllTime: (NSString *)str {
  208. NSURL *fileUrl = [NSURL URLWithString:str];
  209. NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey: @YES};
  210. AVURLAsset *audioAsset = [AVURLAsset URLAssetWithURL:fileUrl options:options];
  211. CMTime audioDuration = audioAsset.duration;
  212. float audioDurationSeconds = CMTimeGetSeconds(audioDuration);
  213. return audioDurationSeconds;
  214. }
  215. - (void)awakeFromNib {
  216. // Initialization code
  217. [super awakeFromNib];
  218. }
  219. - (void)setSelected:(BOOL)selected animated:(BOOL)animated {
  220. [super setSelected:selected animated:animated];
  221. // Configure the view for the selected state
  222. }
  223. @end