import json
import os
import shutil
import time
import uuid

from mycelery.man import app
from dingxiang_doctor import settings
from django.core.mail import send_mail
from dingxiang_doctor.utils.myredis import r
from dingxiang_doctor.utils.baidu_api import baidu
from celery import shared_task
from elasticsearch import Elasticsearch
from doctor.models import Doctor, Hospital
from disease.models import Disease

@app.task(name='send_email')
def send_email_to(email, message):
    """Send an approval-result notification email.

    Args:
        email: recipient address (a single string).
        message: plain-text body of the mail.

    Returns:
        The value of Django's ``send_mail`` — the number of messages
        successfully delivered to the mail backend (0 or 1 here).
    """
    # Removed leftover debug prints ("111111111111111" / send status) —
    # they added noise to the worker log and carried no information.
    subject = "审批结果"
    from_email = settings.EMAIL_FROM
    to_email = [email]
    return send_mail(subject, message, from_email, to_email)

@shared_task
def idcard():
    """Run Baidu ID-card OCR on queued image URLs.

    Pulls up to 11 URLs from the ``baidu_url_list`` redis list, stores each
    OCR result as a JSON string keyed by the URL, then removes the URL from
    the queue.
    """
    pending = r.list_lrange("baidu_url_list", 0, 10)
    for raw_url in pending:
        image_url = raw_url.decode()
        ocr_result = baidu.idcard(image_url)
        # Keep non-ASCII characters readable in the cached JSON payload.
        r.set_str(image_url, json.dumps(ocr_result, ensure_ascii=False))
        r.list_del("baidu_url_list", image_url)

# Sync database inserts / updates / deletes into Elasticsearch.
@shared_task
def es_update():
    """Incrementally mirror Doctor/Disease/Hospital rows into Elasticsearch.

    Three phases:
      1. Inserts — index every row whose id is above the per-model
         high-water mark stored in redis, then advance the mark.
      2. Updates — apply partial updates queued under ``hospital:<es_id>``
         redis hashes.
      3. Deletes — remove ES documents whose ids were queued in the
         ``delete_list`` redis list.
    """
    es = Elasticsearch("http://114.55.177.21:9200")

    # --- 1. New rows ---------------------------------------------------
    # The markers may be missing on the very first run; fall back to 0 so
    # the filter indexes everything instead of raising on a None bound.
    doctor_id = r.get_str("doctor_id") or 0
    disease_id = r.get_str("disease_id") or 0
    hospital_id = r.get_str("hospital_id") or 0
    for doctor in Doctor.objects.filter(id__gt=doctor_id):
        es_info = es.index(index='dingxiang_doctor', body={
            'id': doctor.id,
            'type': 'doctor',
            'name': doctor.name,
            'hospital': doctor.hospital.id,
            'department': doctor.department.id
        })
        doctor.es_id = es_info.get("_id")
        doctor.save()
        r.set_str("doctor_id", doctor.id)
    for disease in Disease.objects.filter(id__gt=disease_id):
        es_info = es.index(index='dingxiang_doctor', body={
            'id': disease.id,
            'type': 'disease',
            'name': disease.name,
            'department': disease.department.id
        })
        disease.es_id = es_info.get("_id")
        disease.save()
        # BUG FIX: original called r.set_str("disease_id") without a value,
        # so the disease high-water mark was never advanced.
        r.set_str("disease_id", disease.id)
    for hospital in Hospital.objects.filter(id__gt=hospital_id):
        es_info = es.index(index='dingxiang_doctor', body={
            'id': hospital.id,
            'type': 'hospital',
            'name': hospital.name,
        })
        hospital.es_id = es_info.get("_id")
        hospital.save()
        r.set_str("hospital_id", hospital.id)

    # --- 2. Modified rows ----------------------------------------------
    # Keys look like "hospital:<es_id>"; the hash holds the changed fields.
    for raw_key in r.get_key(r"hospital:*"):
        key = raw_key.decode()
        es_id = key[key.find(":") + 1:]
        info = r.hash_getall(key)
        info = {k.decode(): v.decode() for k, v in info.items()}
        es.update(index="dingxiang_doctor", id=es_id, body={
            "doc": {
                "id": info.get("id"),
                "type": info.get("type"),
                "name": info.get("name")
            }
        })

    # --- 3. Deleted rows -----------------------------------------------
    # BUG FIX: the original fetched lrange(0, 1) — TWO elements per pass
    # (redis LRANGE bounds are inclusive) — but popped only one, so every
    # element after the first was deleted from ES twice, and the second
    # delete raises NotFoundError. Fetch exactly the head element instead.
    pending = r.list_len("delete_list")
    for _ in range(pending):
        head = r.list_lrange("delete_list", 0, 0)
        for doc_id in head:
            es.delete(index="dingxiang_doctor", id=doc_id.decode())
        r.list_lpop("delete_list")

# Detect stalled chunked uploads: merge complete ones, discard the rest.
@shared_task
def remove_file():
    """Finalize or clean up chunked uploads idle for 30+ minutes.

    For every upload whose last-activity timestamp in the ``file_time``
    sorted set is older than 30 minutes: if the chunks on disk add up to
    the expected total size recorded in ``file_size``, concatenate them
    into a single file under a fresh random name; in either case remove
    the temporary chunk directory and the redis bookkeeping entries.
    """
    stale = r.zset_zrangebyscore("file_time", 0, time.time() - 1800)
    for member in stale:
        file_name = member.decode()
        expected_size = r.zset_zscore("file_size", file_name)
        # NOTE(review): this reads from "../static/upload/" while the merge
        # below writes to "static/upload/" — confirm both resolve to the
        # same directory relative to the worker's CWD.
        chunk_dir = "../static/upload/" + file_name
        if os.path.exists(chunk_dir):
            # Total bytes received so far, walking the chunk directory.
            total_size = 0
            for dirpath, _dirnames, filenames in os.walk(chunk_dir):
                for name in filenames:
                    chunk_path = os.path.join(dirpath, name)
                    if os.path.exists(chunk_path):
                        total_size += os.path.getsize(chunk_path)
            # Number of chunk files (chunks are named 0..n-1).
            chunk_count = sum(entry.is_file() for entry in os.scandir(chunk_dir))
            if total_size == expected_size:
                # Upload is complete: concatenate the chunks in order into
                # one file under a random name, keeping the extension.
                merged_name = uuid.uuid4().hex
                extension = file_name.split(".")[-1]
                with open(f'static/upload/{merged_name}.{extension}', 'wb') as out:
                    # Fixed loop-variable shadowing: the original reused `i`
                    # here, clobbering the outer iteration variable.
                    for chunk_index in range(chunk_count):
                        with open(f'static/upload/{file_name}/{chunk_index}', 'rb') as chunk:
                            out.write(chunk.read())
            # Remove the temporary chunk directory whether or not the
            # upload completed (stale partial uploads are discarded).
            shutil.rmtree(chunk_dir)

        # Drop the redis bookkeeping entries for this upload.
        r.zset_zrem("file_time", file_name)
        r.zset_zrem("file_size", file_name)
