import React, { useEffect, useState, useRef } from 'react';
import { Text, View, StyleSheet, Dimensions, TextInput, Image, TouchableOpacity, PermissionsAndroid, Alert, FlatList } from 'react-native';
import { AudioRecorder, AudioUtils } from 'react-native-audio';
import Sound from 'react-native-sound';
import creatChatroom from '../js/yunxin';

// Panel sizing derived from the device window: one third wide, half tall.
const WINDOW = Dimensions.get('window');
const width = WINDOW.width / 3;
const height = WINDOW.height / 2;

// Style sheet for the chat panel, message rows, and the collapse handle.
// Keys are kept in alphabetical order.
const styles = StyleSheet.create({
  // Horizontal container holding the collapse handle and the chat panel.
  allbox: {
    flexDirection: 'row',
    alignItems: 'center',
  },
  // Outermost wrapper, pinned to the right edge of the screen.
  box: {
    position: 'absolute',
    right: 0,
    alignItems: 'flex-end',
  },
  // The expandable chat panel (message list + input).
  chatBox: {
    backgroundColor: 'lemonchiffon',
    height: height,
    width: width,
    borderStyle: 'solid',
    borderWidth: 2,
    borderColor: 'lightcoral',
    borderRadius: 5,
    borderLeftWidth: 0,
    borderTopLeftRadius: 0,
    borderBottomLeftRadius: 0,
  },
  // Small icon size shared by the talk button and audio-message icon.
  img: {
    width: 20,
    height: 20,
  },
  // Text entry field at the bottom of the panel.
  inputBox: {
    flex: 1,
    borderStyle: 'solid',
    borderColor: 'lightcoral',
    borderTopWidth: 2,
    padding: 0,
  },
  // Scrollable message-list area (takes 4/5 of the panel height).
  messagesBox: {
    flex: 4,
  },
  // Collapse/expand handle on the left side of the panel.
  ss: {
    height: height,
    borderStyle: 'solid',
    borderColor: 'lightcoral',
    backgroundColor: 'lemonchiffon',
    borderRadius: 5,
    borderTopRightRadius: 0,
    borderBottomRightRadius: 0,
    borderWidth: 2,
    justifyContent: 'center',
  },
  // One message row: "name: content" laid out horizontally.
  talk: {
    flexDirection: 'row',
    alignItems: 'center',
    marginTop: 5,
  },
  text: {
    fontSize: 18,
  },
  // Color applied to the sender's name.
  user: {
    color: 'dodgerblue',
  },
});

function TextMessage({ name, context }) {
  return (
    <View style={styles.talk}>
      <Text style={[styles.text, styles.user]}>{`${name}: `}</Text>
      <Text style={styles.text}>{context}</Text>
    </View>
  );
}

function AudioMessage({ name, url }) {
  function playAudio() {
    let sound = new Sound(url, '', (err) => {
      if (err) {
        console.log('failed to load the sound', err);
        return;
      }
      sound.play((success) => {
        if (success) {
          console.log('successfully finished playing');
        } else {
          console.log('playback failed due to audio decoding errors');
        }
      });
    });
  }
  return (
    <View style={styles.talk}>
      <Text style={[styles.text, styles.user]}>{`${name}: `}</Text>
      <TouchableOpacity onPressIn={playAudio}>
        <Image style={styles.img} source={require('../img/talk.png')} />
      </TouchableOpacity>
    </View>
  );
}

export default function ChatBox({ users, setUsers }) {
  const [inputText, setInputText] = useState('');
  const [messages, setMessages] = useState([]);
  const [chatroom, setChatroom] = useState();
  const [canTalk, setCanTalk] = useState(false);
  const [canFile, setCanFile] = useState(false);
  const [isShow, setIsShow] = useState(true);
  const flatList = useRef(null);
  const isGoEnd = useRef(true);
  const usersRef = useRef(null);
  const users_tRef = useRef(null);
  let audioPath = AudioUtils.DocumentDirectoryPath + '/test.aac';
  useEffect(() => {
    check();
    creatChatroom(2, changeChatroomBox).then((value) => {
      setChatroom(value);
    });
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
  useEffect(() => {
    const users_obj = {};
    for (let i = 0, l = users.length; i < l; i++) {
      users_obj[users[i].id] = users[i];
    }
    usersRef.current = users_obj;
    users_tRef.current = [...users];
  }, [users]);
  async function sendText(text) {
    if (text) {
      // console.log(`http://192.168.50.21:8082?context=${text}`,1);
      let response = await fetch(`http://192.168.50.21:8082?context=${text}`);
      const arr = (await response.json()).resp;
      // console.log(typeof text);
      for (const item of arr) {
        console.log(item);
        text = text.replace(item, '*');
      }
      console.log(text);
      chatroom?.sendText({
        text: text,
        done: changeChatroomBox,
      });
    }
  }
  function sendTalk(filePath) {
    chatroom?.previewFile({
      type: 'audio',
      filePath,
      done: function (error, file) {
        if (!error) {
          chatroom?.sendFile({
            file: file,
            done: changeChatroomBox,
          });
        }
      },
    });
  }
  function changeChatroomBox(_, msg) {
    function addMessage(userMessages, item, index) {
      userMessages.messages.push(item);
      const timer = setTimeout(() => {
        userMessages.messages.shift();
        userMessages.timers.shift();
        users_tRef.current[index].messages = userMessages;
        setUsers(users_tRef.current);
      }, 5000);
      userMessages.timers.push(timer);
      if (userMessages.messages.length > 5) {
        userMessages.messages.shift();
        clearTimeout(userMessages.timers[0]);
        userMessages.timers.shift();
      }
    }
    if (msg.type === 'text') {
      const users_t = [...users_tRef.current];
      for (let i = 0, l = users_t.length; i < l; i++) {
        if (users_t[i].id === msg.from) {
          addMessage(users_t[i].messages, msg.text, i);
          console.log('okkk');
          break;
        }
      }
      console.log(users_t[0].messages);
      setUsers(users_t);
    }
    if (msg.type === 'text') {
      let { type, text, from } = msg;
      setMessages(prevState => [...prevState, { type, text, from }]);
    } else if (msg.type === 'file') {
      let { type, file, from } = msg;
      setMessages(prevState => [...prevState, { type, file, from }]);
    }
    goEnd();
  }
  function goEnd() {
    if (isGoEnd.current) {
      flatList.current.scrollToEnd();
    }
  }
  function submit(text) {
    sendText(text);
    setInputText('');
  }
  async function check() {
    const rationale_audio = {
      'title': '获取录音权限',
      'message': 'chat正请求获取麦克风权限用于录音,是否准许',
    };
    const rationale_file = {
      'title': '获取文件读取权限',
      'message': 'chat正请求获取文件读取权限,是否准许',
    };
    const status_audio = await PermissionsAndroid.request(PermissionsAndroid.PERMISSIONS.RECORD_AUDIO, rationale_audio);
    const status_fileRead = await PermissionsAndroid.request(PermissionsAndroid.PERMISSIONS.READ_EXTERNAL_STORAGE, rationale_file);
    const status_fileWrite = await PermissionsAndroid.request(PermissionsAndroid.PERMISSIONS.WRITE_EXTERNAL_STORAGE, rationale_file);
    if (status_audio === 'granted') {
      setCanTalk(true);
    }
    if (status_fileRead === 'granted' && status_fileWrite === 'granted') {
      setCanFile(true);
    }
  }
  async function record() {
    if (!canTalk) {
      Alert.alert('缺少麦克风权限!');
    } else {
      try {
        await AudioRecorder.prepareRecordingAtPath(audioPath);
        const filePath = await AudioRecorder.startRecording();
        console.log('start talk', filePath);
      } catch (error) {
        console.error(error);
      }
    }
  }

  async function stop() {
    if (canTalk) {
      try {
        console.log('over');
        const filePath = await AudioRecorder.stopRecording();
        if (!canFile) {
          Alert.alert('缺少文件存取权限!');
        } else {
          console.log('send talk!');
          sendTalk(filePath);
        }
      } catch (error) {
        console.error(error);
      }
    }
  }
  return (
    <View style={styles.box}>
      <View style={styles.allbox}>
        <TouchableOpacity onPress={() => setIsShow(pre => !pre)} style={styles.ss}>
          <Text style={styles.sss}>{'<>'}</Text>
        </TouchableOpacity>
        {isShow &&
          <View style={styles.chatBox}>
            <View style={styles.messagesBox}>
              <FlatList
                ref={flatList}
                data={messages}
                renderItem={({ item, index }) => {
                  const msg = item;
                  const tname = usersRef.current[msg.from]?.name ?? 'noname';
                  if (msg.type === 'text') {
                    let { text } = msg;
                    return <TextMessage name={tname} context={text} key={text + index} />;
                  } else {
                    let { file } = msg;
                    return <AudioMessage name={tname} url={file.url} key={file.url + index} />;
                  }
                }}
                onScrollBeginDrag={() => { isGoEnd.current = false; }}
                onEndReached={() => { isGoEnd.current = true; }}
                onEndReachedThreshold={1}
              />
            </View>
            <TextInput
              style={styles.inputBox}
              value={inputText}
              onChangeText={(text) => setInputText(text)}
              onSubmitEditing={() => submit(inputText)} />
          </View>}
      </View>
      <TouchableOpacity onPressIn={record} onPressOut={stop} style={styles.img}>
        <Image style={styles.img} source={require('../img/talk.png')} />
      </TouchableOpacity>
    </View>
  );
}
