//
//  image_viewController.swift
//  AIMobileOfficeiOSApp
//
//  Created by 邓杰 on 2018/6/21.
//  Copyright © 2018年 AIMobileOffice. All rights reserved.
//

import UIKit
import  Speech


/// Collection-view cell that shows a single PPT slide thumbnail.
class pptImageCell: UICollectionViewCell {
    @IBOutlet var imageView: UIImageView!
}

/// Chat-style controller for a PPT editing session: shows the current slide
/// image and a thumbnail strip, replays the shared chat history, and lets the
/// user type or dictate (zh-CN speech recognition) a command that is sent to
/// the remote chatbot service.
class image_viewController: UIViewController, UICollectionViewDataSource, UICollectionViewDelegate, SFSpeechRecognizerDelegate {
    @IBOutlet var imageView: UIImageView!
    @IBOutlet var messageBox: UITextField!
    @IBOutlet var sendButton: UIButton!
    @IBOutlet var audioButton: UIButton!
    @IBOutlet var collectionView: UICollectionView!
    @IBOutlet var textView: UITextView!
    @IBOutlet var scrollView: UIScrollView!

    // Speech-recognition pipeline, fixed to Mandarin Chinese.
    private let speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private let audioEngine = AVAudioEngine()

    // i: 1-based index of the next chat message to store/display.
    // h: multiplier used to grow the scroll view's contentSize one page at a time.
    // number_mapMessage: conversation history shared across controller instances,
    // keyed by message index (1...count).
    internal static var i = 1
    internal static var h = 1
    internal static var number_mapMessage = Dictionary<Int, MessageInformation>()

    let reuseIdentifier = "cell"

    var images_s: [PPT_image] = []
    var picture_forImgae: UIImage?
    var new_imageName: String?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Replace the system back button with one that returns to the file list.
        self.navigationItem.hidesBackButton = true
        let newBackButton = UIBarButtonItem(title: "Back", style: UIBarButtonItemStyle.plain, target: self, action: #selector(image_viewController.GoBack(sender:)))
        self.navigationItem.leftBarButtonItem = newBackButton

        // Show the pushed-in picture if there is one; otherwise fall back to the
        // first slide. (Previously `images_s[0]` crashed when the list was empty.)
        imageView.image = picture_forImgae ?? images_s.first.flatMap { UIImage(named: $0.imagename) }

        if let name = new_imageName {
            print(name)
            images_s.append(PPT_image(imageName: name))
        }

        // NOTE(review): the storyboard scroll view is replaced at runtime by a
        // UITextView with the same frame (UITextView is a UIScrollView subclass).
        // Kept as-is to preserve behavior — confirm this is intentional.
        let frame: CGRect = self.scrollView.frame
        self.scrollView = UITextView.init(frame: CGRect.init(x: frame.origin.x, y: frame.origin.y, width: frame.size.width, height: frame.size.height))
        self.scrollView.showsVerticalScrollIndicator = true
        self.scrollView.isUserInteractionEnabled = true
        self.scrollView.isScrollEnabled = true
        self.view.addSubview(scrollView)

        // Replay the shared chat history into the scroll view.
        print("----------------------\(image_viewController.number_mapMessage.count)")
        if image_viewController.number_mapMessage.count > 0 {
            for i in 1..<image_viewController.number_mapMessage.count + 1 {
                // Skip any gap in the history instead of force-casting
                // (the old `as!` crashed on a missing key).
                guard let a = image_viewController.number_mapMessage[i] else { continue }
                let textfield_height = i * 40
                let textField = UITextField(frame: CGRect(x: 8, y: 20, width: Int(self.view.frame.size.width - 40), height: textfield_height))
                // Client messages render black, bot ("default") messages red.
                if a.type_ofMessager == "client" {
                    textField.attributedPlaceholder = NSAttributedString(string: a.message_String, attributes: [NSAttributedStringKey.foregroundColor: UIColor.black])
                }
                if a.type_ofMessager == "default" {
                    textField.attributedPlaceholder = NSAttributedString(string: a.message_String, attributes: [NSAttributedStringKey.foregroundColor: UIColor.red])
                }
                self.scrollView.addSubview(textField)
                // Grow the scrollable area one page at a time once content outruns it.
                if Int(self.scrollView.contentSize.height - 10) < textfield_height {
                    self.scrollView.contentSize.height = self.scrollView.frame.height * CGFloat(image_viewController.h)
                    image_viewController.h = image_viewController.h + 1
                }
            }
            image_viewController.i = image_viewController.number_mapMessage.count + 1
        }

        // Disable dictation until the user grants speech-recognition permission.
        audioButton.isEnabled = false
        speechRecognizer?.delegate = self

        SFSpeechRecognizer.requestAuthorization { (authStatus) in
            var isButtonEnabled = false

            switch authStatus {
            case .authorized:
                isButtonEnabled = true

            case .denied:
                isButtonEnabled = false
                print("User denied access to speech recognition")

            case .restricted:
                isButtonEnabled = false
                print("Speech recognition restricted on this device")

            case .notDetermined:
                isButtonEnabled = false
                print("Speech recognition not yet authorized")
            }

            // Authorization callback may arrive off the main thread; hop back
            // before touching UI.
            OperationQueue.main.addOperation {
                self.audioButton.isEnabled = isButtonEnabled
            }
        }
    }

    /// Custom back action: restores the tab bar and pushes the file-list screen.
    @objc func GoBack(sender: UIBarButtonItem) {
        self.tabBarController?.tabBar.isHidden = false
        let mainStoryboard: UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
        let desVC = mainStoryboard.instantiateViewController(withIdentifier: "NewFile_viewController") as! NewFile_viewController
        self.navigationController?.pushViewController(desVC, animated: true)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Toggles dictation: stops a running capture, otherwise starts one.
    @IBAction func audioButton_press() {
        if audioEngine.isRunning {
            audioEngine.stop()
            recognitionRequest?.endAudio()
            // Re-enabled by the recognition task's completion handler.
            audioButton.isEnabled = false
        } else {
            startRecording()
        }
    }

    /// Records the typed message, appends it to the on-screen transcript,
    /// forwards it to the chatbot endpoint, and pushes the recommendation screen.
    @IBAction func sendButton_press() {
        // Nothing to send for a nil/empty message box (previously force-unwrapped).
        guard let sentence = messageBox.text, !sentence.isEmpty else { return }

        let message: MessageInformation = MessageInformation(messageType: "client", messageInfo: sentence)
        image_viewController.number_mapMessage[image_viewController.i] = message
        print("-------------------------------------------------------------\(image_viewController.i)")
        print(image_viewController.number_mapMessage[1]?.message_String ?? "hehh")

        var url_string = "http://193.112.102.17:5000/aioffice/call_chatbot?product=PowerPoint&message="
        url_string.append(sentence)
        print(url_string)
        // Percent-encode so Chinese characters are legal in the URL.
        let encodedStr = url_string.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed)

        // Echo the message into the transcript view.
        let textfield_height = image_viewController.i * 30
        let textField = UITextField(frame: CGRect(x: 8, y: 20, width: Int(self.view.frame.size.width - 40), height: textfield_height))
        textField.attributedPlaceholder = NSAttributedString(string: sentence, attributes: [NSAttributedStringKey.foregroundColor: UIColor.black])
        self.scrollView.addSubview(textField)
        if Int(self.scrollView.contentSize.height - 10) < textfield_height {
            self.scrollView.contentSize.height = self.scrollView.frame.height * CGFloat(image_viewController.h)
            image_viewController.h = image_viewController.h + 1
        }
        image_viewController.i = image_viewController.i + 1

        // Stop any in-flight dictation before sending.
        self.audioEngine.stop()
        recognitionRequest?.endAudio()
        messageBox.text = ""

        // Build the request URL; bail out if encoding failed instead of crashing.
        guard let encoded = encodedStr, let final_url = URL(string: encoded) else {
            print("cannot connect to the internet")
            return
        }
        let session = URLSession.shared.dataTask(with: final_url) { (data, response, error) in
            if let response = response {
                print(response)
            }
            if error != nil {
                print(error ?? "error happening")
            }
            if let data = data {
                do {
                    // Log the chatbot's reply; unexpected payload shapes no
                    // longer crash (old code force-unwrapped every key).
                    if let json = try JSONSerialization.jsonObject(with: data, options: []) as? NSDictionary {
                        print("here is Json stuff ______________------------")
                        print(json)
                        print("intent:\(json["intent"] ?? "nil")")
                        print("message:\(json["message"] ?? "nil")")
                        print("produce:\(json["product"] ?? "nil")")
                        print("office_module_result:\(json["office_module_result"] ?? "nil")")
                    }
                } catch {
                    print(error)
                }
            }
        }
        session.resume()

        // Demo slide set for the recommendation screen.
        let images: [PPT_image] = [
            PPT_image(imageName: "cafedeadend.jpg"),
            PPT_image(imageName: "homei.jpg"),
            PPT_image(imageName: "teakha.jpg"),
            PPT_image(imageName: "cafeloisl.jpg"),
            PPT_image(imageName: "petiteoyster.jpg"),
            PPT_image(imageName: "forkeerestaurant.jpg"),
            PPT_image(imageName: "posatelier.jpg"),
            PPT_image(imageName: "bourkestreetbakery.jpg"),
            PPT_image(imageName: "haighschocolate.jpg"),
        ]
        let mainStoryboard: UIStoryboard = UIStoryboard(name: "Main", bundle: nil)
        let desVC = mainStoryboard.instantiateViewController(withIdentifier: "Recommend_ViewController") as! Recommend_ViewController
        desVC.images = images
        self.navigationController?.pushViewController(desVC, animated: true)
    }

    /// Configures the audio session, taps the microphone, and streams buffers
    /// into a speech-recognition task whose partial results fill `messageBox`.
    func startRecording() {
        // Cancel any previous task before starting a fresh one.
        if recognitionTask != nil {
            recognitionTask?.cancel()
            recognitionTask = nil
        }

        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSessionCategoryRecord)
            try audioSession.setMode(AVAudioSessionModeMeasurement)
            try audioSession.setActive(true, with: .notifyOthersOnDeactivation)
        } catch {
            print("audioSession properties weren't set because of an error")
        }
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()

        // `inputNode` is non-optional on this SDK; the old `== nil` check was
        // dead code and a compiler warning.
        let inputNode = audioEngine.inputNode

        guard let recognitionRequest = recognitionRequest else {
            fatalError("Unable to create an SFSpeechAudioBufferRecognition")
        }

        // Stream partial transcriptions into the message box as the user speaks.
        recognitionRequest.shouldReportPartialResults = true

        recognitionTask = speechRecognizer?.recognitionTask(with: recognitionRequest, resultHandler: { (result, error) in
            var isFinal = false

            if let result = result {
                self.messageBox.text = result.bestTranscription.formattedString
                isFinal = result.isFinal
            }

            // On error or final result, tear down the capture pipeline.
            if error != nil || isFinal {
                self.audioEngine.stop()
                inputNode.removeTap(onBus: 0)

                self.recognitionRequest = nil
                self.recognitionTask = nil

                self.audioButton.isEnabled = true
            }
        })

        // Forward microphone buffers to the recognizer.
        let recordingFormat = inputNode.outputFormat(forBus: 0)
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { (buffer, when) in
            self.recognitionRequest?.append(buffer)
        }

        audioEngine.prepare()

        do {
            try audioEngine.start()
        } catch {
            print("audioEngine couldn't start because of an error")
        }
        messageBox.text = "say something i am listening"
    }

    // MARK: - SFSpeechRecognizerDelegate

    /// Mirrors recognizer availability onto the dictation button.
    func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) {
        audioButton.isEnabled = available
    }

    // MARK: - UICollectionViewDataSource / Delegate

    func numberOfSections(in collectionView: UICollectionView) -> Int {
        return 1
    }

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return images_s.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: reuseIdentifier, for: indexPath) as! pptImageCell
        cell.imageView.image = UIImage(named: images_s[indexPath.row].imagename)
        return cell
    }

    /// Tapping a thumbnail shows that slide in the main image view.
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        imageView.image = UIImage(named: images_s[indexPath.row].imagename)
    }

}
