//
//  JSQMessageTools.m
//  Conversation
//
//  Created by C_HAO on 15/9/23.
//  Copyright © 2015年 CHAOO. All rights reserved.
//

#import "JSQMessageTools.h"
#import "NSBundle+JSQMessages.h"
#import "JSQEmotionTextAttachment.h"
#import "UIImage+JSQMessages.h"
#import "VoiceConverter.h"
#import <ImageIO/ImageIO.h>


@implementation JSQMessageTools

static NSDictionary *_expression;
static NSDictionary *_expressionConvert;


/// Emotion-mark -> image-name mapping, lazily loaded once from the
/// "Face/Emoticon.plist" resource in the JSQMessages asset bundle.
+ (NSDictionary *)expression {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        NSString *plistPath = [[NSBundle jsq_messagesAssetBundle] pathForResource:@"Emoticon"
                                                                           ofType:@"plist"
                                                                      inDirectory:@"Face"];
        _expression = [NSDictionary dictionaryWithContentsOfFile:plistPath];
    });
    return _expression;
}

/// Reverse of +expression: image-name -> emotion-mark, built once.
+ (NSDictionary *)expressionConvert {
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        NSDictionary *forward = [JSQMessageTools expression];
        NSMutableDictionary *inverted = [NSMutableDictionary dictionaryWithCapacity:forward.count];
        [forward enumerateKeysAndObjectsUsingBlock:^(id mark, id imageName, BOOL *stop) {
            inverted[imageName] = mark;
        }];
        _expressionConvert = inverted;
    });
    return _expressionConvert;
}

/// Text attributes used for link rendering: 16pt system font, black text.
+ (NSDictionary *)jsq_linkTextAttributes {
    UIFont *linkFont = [UIFont systemFontOfSize:16.0f];
    UIColor *linkColor = [UIColor blackColor];
    return @{ NSFontAttributeName : linkFont,
              NSForegroundColorAttributeName : linkColor };
}

/// Replaces every known emotion mark (pattern "~:a1~") in the attributed
/// string with an inline JSQEmotionTextAttachment image, in place.
///
/// @param emotionAttributedString In/out attributed string to scan and mutate.
/// @return YES when at least one mark matched the pattern.
+ (BOOL)analyzeEmotion:(NSMutableAttributedString * *)emotionAttributedString {
    NSString *text = (*emotionAttributedString).string;
    NSError *error = nil;
    NSString *regulaStr = @"~:[a-z][0-9]~";
    NSRegularExpression *regex = [NSRegularExpression regularExpressionWithPattern:regulaStr
                                                                           options:NSRegularExpressionCaseInsensitive
                                                                             error:&error];
    if (regex == nil) {
        // Should never happen for a constant pattern, but don't crash on a
        // nil regex if it does.
        NSLog(@"analyzeEmotion: failed to build regex: %@", error);
        return NO;
    }

    NSArray *arrayOfAllMatches = [regex matchesInString:text options:0 range:NSMakeRange(0, [text length])];

    // Every replaced mark (fixed length 5) collapses to a single attachment
    // character, so later match locations shift left by (length - 1) for
    // each replacement already performed. Skipped marks cause no shift.
    NSUInteger replacedCount = 0;
    for (NSTextCheckingResult *match in arrayOfAllMatches) {
        NSRange matchRange = NSMakeRange(match.range.location - (match.range.length - 1) * replacedCount,
                                         match.range.length);
        NSString *substringForMatch = [(*emotionAttributedString).string substringWithRange:matchRange];
        NSString *imageName = [[JSQMessageTools expression] objectForKey:substringForMatch];
        if (imageName == nil) {
            // Unknown mark: leave the literal text intact instead of
            // deleting it and inserting an imageless attachment.
            continue;
        }
        UIImage *emotionImage = [UIImage jsq_expression:[imageName stringByAppendingString:@"@2x"]];
        if (emotionImage == nil) {
            continue;
        }
        JSQEmotionTextAttachment *attachment = [[JSQEmotionTextAttachment alloc] init];
        attachment.bounds = CGRectMake(0, 0, emotionImage.size.width, emotionImage.size.height);
        attachment.image = emotionImage;
        attachment.emotionMark = substringForMatch;
        attachment.emotionSize = CGSizeMake(emotionImage.size.width, emotionImage.size.height);
        NSAttributedString *attachStr = [NSAttributedString attributedStringWithAttachment:attachment];

        [*emotionAttributedString deleteCharactersInRange:matchRange];
        [*emotionAttributedString insertAttributedString:attachStr atIndex:matchRange.location];
        replacedCount++;
    }

    // Explicit comparison: returning the raw count as BOOL (signed char)
    // truncates — a count that is a multiple of 256 would read as NO.
    return arrayOfAllMatches.count > 0;
}

/// Current Unix timestamp, truncated to whole seconds, as a decimal string.
+ (NSString *)getCurrentSystemDateSecond {
    NSTimeInterval wholeSeconds = floor([[NSDate date] timeIntervalSince1970]);
    return [@(wholeSeconds) stringValue];
}

/// YES when a file-system entry (file or directory) exists at `filePath`.
+ (BOOL)isFilePath:(NSString *)filePath {
    return [[NSFileManager defaultManager] fileExistsAtPath:filePath];
}

/// Returns the size in bytes of the file at `filePath` as a decimal string,
/// or @"0" when the file does not exist or its attributes cannot be read.
+ (NSString *)fileSizeWithByteString:(NSString *)filePath {
    NSString *strFileSize = @"0";
    NSError *error = nil;

    if ([JSQMessageTools isFilePath:filePath]) {
        NSDictionary *fileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:filePath error:&error];
        if (fileAttributes != nil) {
            // Use an integer accessor: a float's 24-bit mantissa cannot
            // represent byte counts above ~16 MB exactly.
            unsigned long long fileSize = [fileAttributes[NSFileSize] unsignedLongLongValue];
            strFileSize = [NSString stringWithFormat:@"%llu", fileSize];
        } else {
            // Don't silently swallow the failure; at least log it.
            NSLog(@"fileSizeWithByteString: could not read attributes of %@: %@", filePath, error);
        }
    }

    return strFileSize;
}

// Maps a recorded-audio path to a playable file path, converting AMR to WAV
// on demand via VoiceConverter.
//
// If the last path component ends in ".amr" (and the full path does not end
// in ".xxx"), the playable path is "<filePath>.wav". When that WAV already
// exists it is returned directly; otherwise it is produced from the AMR
// source first. Any other path is returned unchanged.
//
// NOTE(review): the ".xxx" suffix check looks like a sentinel for paths
// that must never be converted — confirm its meaning against callers.
+ (NSString *)audioFilePath:(NSString *)filePath {
    // Last path component, used to decide whether the source is AMR.
    NSString *subType = [[NSString alloc] initWithString:[[filePath componentsSeparatedByString:@"/"] lastObject]];

    // Keep the original (pre-".wav") path as the conversion source.
    NSString *tempPlayVoiceFilePath = filePath;

    if (![filePath hasSuffix:@".xxx"] && [subType hasSuffix:@".amr"]) {
        // Target of the conversion, e.g. "voice.amr" -> "voice.amr.wav".
        filePath = [[NSString alloc] initWithFormat:@"%@.wav", filePath];
    }
    // Already playable (or previously converted): nothing to do.
    if ([JSQMessageTools isFilePath:filePath]) {
        return filePath;
    }

    if ([subType hasSuffix:@".amr"] && ![filePath hasSuffix:@".xxx"]) {
        // Synchronous AMR -> WAV conversion into the derived path.
        [VoiceConverter ConvertAmrToWav:tempPlayVoiceFilePath wavSavePath:filePath];
    }

    return filePath;
}

/// YES when the absolute gap between the two dates exceeds `seconds`.
+ (BOOL)timeDifference:(NSDate *)startDate date:(NSDate *)endDate interval:(NSInteger)seconds {
    NSTimeInterval gap = fabs([endDate timeIntervalSince1970] - [startDate timeIntervalSince1970]);
    return gap > seconds;
}

/// Three-letter wire code for a MessageClass value; empty string for
/// unknown values. (A `break` after `return` is unreachable, so none are
/// written here.)
+ (NSString *)enumToString:(MessageClass)messageClass {
    switch (messageClass) {
        case MessagePMS: return @"PMS";
        case MessageIYM: return @"IYM";
        case MessageMMS: return @"MMS";
        case MessageJMG: return @"JMG";
        case MessageLMG: return @"LMG";
        case MessageMMG: return @"MMG";
        case MessageACC: return @"ACC";
        case MessageADC: return @"ADC";
        case MessageWAF: return @"WAF";
        case MessageCAF: return @"CAF";
        case MessageDLF: return @"DLF";
        case MessageDLT: return @"DLT";
        case MessageDLD: return @"DLD";
        case MessageAMS: return @"AMS";
        case MessageJGI: return @"JGI";
        case MessageCJG: return @"CJG";
        case MessageCNM: return @"CNM";
        case MessageSBM: return @"SBM";
        case MessageFAH: return @"FAH";
        case MessageSOM: return @"SOM";
        case MessageDDM: return @"DDM";
        case MessageRSC: return @"RSC";
        default:         return @"";
    }
}

/// Wire name for a JSQMessageType value; empty string for unknown values.
/// Note that the location type maps to "link" on the wire.
+ (NSString *)enumMessageTypeToString:(JSQMessageType)messageType {
    switch (messageType) {
        case MESSAGE_TYPE_TEXT:     return @"text";
        case MESSAGE_TYPE_IMAGE:    return @"image";
        case MESSAGE_TYPE_VOICE:    return @"audio";
        case MESSAGE_TYPE_LOCATION: return @"link";
        case MESSAGE_TYPE_SOS:      return @"sos";
        default:                    return @"";
    }
}

// CGDataProvider read callback: streams bytes out of the
// ALAssetRepresentation passed as `info` (retained by
// thumbnailForAsset:maxPixelSize:).
static size_t getAssetBytesCallback(void *info, void *buffer, off_t position, size_t count) {
    ALAssetRepresentation *representation = (__bridge id)info;

    NSError *readError = nil;
    size_t bytesRead = [representation getBytes:(uint8_t *)buffer
                                     fromOffset:position
                                         length:count
                                          error:&readError];
    if (bytesRead == 0 && readError) {
        // We have no way of passing this info back to the caller, so we log it, at least.
        NSLog(@"thumbnailForAsset:maxPixelSize: got an error reading an asset: %@", readError);
    }

    return bytesRead;
}

// CGDataProvider releaseInfo callback.
static void releaseAssetCallback(void *info) {
    // The info here is an ALAssetRepresentation which we CFRetain in thumbnailForAsset:maxPixelSize:.
    // This release balances that retain.
    CFRelease(info);
}

// Returns a UIImage for the given asset, with size length at most the passed size.
// The resulting UIImage will be already rotated to UIImageOrientationUp, so its CGImageRef
// can be used directly without additional rotation handling.
// This is done synchronously, so you should call this method on a background queue/thread.
// Returns nil on any failure.
+ (UIImage *)thumbnailForAsset:(ALAsset *)asset maxPixelSize:(NSUInteger)size {
    NSParameterAssert(asset != nil);
    NSParameterAssert(size > 0);

    ALAssetRepresentation *rep = [asset defaultRepresentation];

    CGDataProviderDirectCallbacks callbacks = {
        .version = 0,
        .getBytePointer = NULL,
        .releaseBytePointer = NULL,
        .getBytesAtPosition = getAssetBytesCallback,
        .releaseInfo = releaseAssetCallback,
    };

    // The provider takes ownership of this retain; releaseAssetCallback
    // balances it when the provider is destroyed.
    CFTypeRef retainedRep = CFBridgingRetain(rep);
    CGDataProviderRef provider = CGDataProviderCreateDirect((void *)retainedRep, [rep size], &callbacks);
    if (provider == NULL) {
        // Provider creation failed, so releaseInfo will never run — balance
        // the retain ourselves to avoid leaking the representation.
        CFRelease(retainedRep);
        return nil;
    }

    CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);
    if (source == NULL) {
        // CFRelease(NULL) crashes, so bail out before the unconditional
        // releases below.
        CFRelease(provider);
        return nil;
    }

    CGImageRef imageRef = CGImageSourceCreateThumbnailAtIndex(source, 0, (__bridge CFDictionaryRef)@{
        (NSString *)kCGImageSourceCreateThumbnailFromImageAlways : @YES,
        (NSString *)kCGImageSourceThumbnailMaxPixelSize : @(size),
        (NSString *)kCGImageSourceCreateThumbnailWithTransform : @YES,
    });
    CFRelease(source);
    CFRelease(provider);

    if (imageRef == NULL) {
        return nil;
    }

    UIImage *toReturn = [UIImage imageWithCGImage:imageRef];

    CFRelease(imageRef);

    return toReturn;
}

/// Scales `image` to fill `size` (aspect-fill): the image is scaled by the
/// larger of the two axis factors so both dimensions cover the target, and
/// the overflowing axis is centered (cropped at the canvas edges).
///
/// @param image Source image.
/// @param size  Target canvas size in points.
/// @return The scaled image, or nil if rendering failed.
+ (UIImage *)scaleToSize:(UIImage *)image size:(CGSize)size {
    CGSize imageSize = image.size;
    CGFloat scaledWidth = size.width;
    CGFloat scaledHeight = size.height;
    CGPoint thumbnailPoint = CGPointZero;

    if (!CGSizeEqualToSize(imageSize, size)) {
        CGFloat widthFactor = size.width / imageSize.width;
        CGFloat heightFactor = size.height / imageSize.height;
        // Aspect-fill: use the larger factor, then center the axis that
        // overflows the canvas.
        CGFloat scaleFactor = MAX(widthFactor, heightFactor);
        scaledWidth = imageSize.width * scaleFactor;
        scaledHeight = imageSize.height * scaleFactor;
        if (widthFactor > heightFactor) {
            thumbnailPoint.y = (size.height - scaledHeight) * 0.5;
        } else if (widthFactor < heightFactor) {
            thumbnailPoint.x = (size.width - scaledWidth) * 0.5;
        }
    }

    // Scale 0.0 uses the device's screen scale; the plain
    // UIGraphicsBeginImageContext always renders at 1x and produces blurry
    // output on Retina displays.
    UIGraphicsBeginImageContextWithOptions(size, NO, 0.0);

    CGRect thumbnailRect = CGRectMake(thumbnailPoint.x, thumbnailPoint.y, scaledWidth, scaledHeight);
    [image drawInRect:thumbnailRect];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();

    UIGraphicsEndImageContext();

    if (newImage == nil) {
        NSLog(@"scale image fail");
    }

    return newImage;
}

/// Full path for a cached PNG named `imageName` under Caches/Image/,
/// creating the directory on first use.
+ (NSString *)imagePath:(NSString *)imageName {
    NSFileManager *manager = [NSFileManager defaultManager];
    BOOL isDirectory = NO;
    NSString *caches = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) firstObject];
    // Prefer the path-component API over hand-rolled "%@/%@" formatting.
    NSString *imageDirectory = [caches stringByAppendingPathComponent:@"Image"];
    if (![manager fileExistsAtPath:imageDirectory isDirectory:&isDirectory]) {
        NSError *error = nil;
        if (![manager createDirectoryAtPath:imageDirectory withIntermediateDirectories:YES attributes:nil error:&error]) {
            // Surface the failure instead of silently returning a path that
            // cannot be written to.
            NSLog(@"imagePath: failed to create %@: %@", imageDirectory, error);
        }
    }
    return [imageDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.png", imageName]];
}

/// Full path for a cached 3GP voice file named `voiceName` under
/// Caches/Voice/, creating the directory on first use.
+ (NSString *)voicePath:(NSString *)voiceName {
    NSFileManager *manager = [NSFileManager defaultManager];
    BOOL isDirectory = NO;
    NSString *caches = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) firstObject];
    // Prefer the path-component API over hand-rolled "%@/%@" formatting.
    NSString *voiceDirectory = [caches stringByAppendingPathComponent:@"Voice"];
    if (![manager fileExistsAtPath:voiceDirectory isDirectory:&isDirectory]) {
        NSError *error = nil;
        if (![manager createDirectoryAtPath:voiceDirectory withIntermediateDirectories:YES attributes:nil error:&error]) {
            // Surface the failure instead of silently returning a path that
            // cannot be written to.
            NSLog(@"voicePath: failed to create %@: %@", voiceDirectory, error);
        }
    }
    return [voiceDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.3gp", voiceName]];
}

@end
