import os
import requests
import zipfile
import pydicom
from requests.auth import HTTPBasicAuth
from database.tools.application_batch_test import db
from database.models.mri import Mri
from database.models.patient import Patient
from database.models.study_id import Study_id
from pyorthanc import find, Orthanc
orthanc = Orthanc('http://localhost:8042',
                  username='orthanc', password='orthanc')

# Maps Orthanc study identifier -> DICOM StudyInstanceUID for the studies
# uploaded during the current run; consumed by upload_slice().
# (The original `global MAPPING_TABLE` statement was a no-op at module
# scope and has been removed.)
MAPPING_TABLE = {}

         
def UploadBuffer(dicom):
    """Upload one DICOM instance (raw bytes) to Orthanc and mirror its
    study metadata into the database.

    Side effects: records the Orthanc study id -> StudyInstanceUID pair
    in MAPPING_TABLE, and inserts Patient / Mri / Study_id rows when they
    do not already exist.

    Returns the StudyInstanceUID on success, or None when Orthanc rejects
    the upload.
    """
    url = 'http://localhost:8042'
    auth = HTTPBasicAuth('orthanc', 'orthanc')

    r = requests.post('%s/instances' % url, auth=auth, data=dicom)
    try:
        r.raise_for_status()
    except requests.exceptions.HTTPError:
        # Orthanc rejected the instance; skip it.  (The original used a
        # bare `except:` that also swallowed KeyboardInterrupt etc.)
        return None

    info = r.json()
    # Upload done — fetch the instance tags ("short" form keys the tags
    # by "group,element" hex pairs).
    r2 = requests.get('%s/instances/%s/tags?short' % (url, info['ID']),
                      auth=auth)
    r2.raise_for_status()
    tags = r2.json()

    name = tags['0010,0010']        # PatientName
    sex = tags['0010,0040']         # PatientSex
    borndate = tags['0010,0030']    # PatientBirthDate (YYYYMMDD)
    fid = tags['0008,0050']         # AccessionNumber ("F number")
    pid = tags['0010,0020']         # PatientID
    study_id = tags['0020,000d']    # StudyInstanceUID
    check_date = tags['0008,0020']  # StudyDate (YYYYMMDD)
    hospital = tags['0008,0080']    # InstitutionName
    device = tags['0008,0070']      # Manufacturer
    modality = tags['0008,0060']    # Modality
    orthanc_id = info['ParentStudy']

    # Approximate age in years from the date strings' year prefixes.
    age = int(check_date[:4]) - int(borndate[:4])

    # Remember the Orthanc study id -> StudyInstanceUID mapping so
    # upload_slice() can later count this study's instances.
    MAPPING_TABLE[orthanc_id] = study_id

    # Create the patient row if it does not exist yet.
    patient = Patient.query.filter(Patient.pid == pid).first()
    if patient is None:
        db.session.add(Patient(name, sex, age, pid, hospital, borndate))
        db.session.commit()
        patient = Patient.query.filter(Patient.pid == pid).first()

    # Add the MRI record unless its F number is already present.
    # BUG FIX: the original ran `Patient.query.filter(Mri.fid == fid)`,
    # i.e. queried the Patient model while filtering on an Mri column.
    if Mri.query.filter(Mri.fid == fid).first() is None:
        db.session.add(Mri(fid, patient.id, study_id, check_date,
                           device, modality))
        db.session.commit()

    # Record the StudyInstanceUID <-> Orthanc id pair once.
    if Study_id.query.filter(Study_id.study_id == study_id).first() is None:
        db.session.add(Study_id(study_id, orthanc_id))

    # Mark every MRI row of this study as available; also commits the
    # pending Study_id insert above.
    db.session.query(Mri).filter_by(study_id=study_id).update({'available': True})
    db.session.commit()
    return study_id

def upload_slice():
    """For every study uploaded in this session (tracked in MAPPING_TABLE),
    count its DICOM instances in Orthanc and store that count in the
    `slice` column of the matching Mri row."""
    for orthanc_study_id, study_uid in MAPPING_TABLE.items():
        n_slices = 0

        matches = find(
            orthanc=orthanc,
            study_filter=lambda s: s.id_ == orthanc_study_id,
        )
        for pat in matches:
            for study in pat.studies:
                if study.id_ != orthanc_study_id:
                    continue
                for series in study.series:
                    n_slices += sum(1 for _ in series.instances)

        # Persist the slice count for this study and commit.
        Mri.query.filter(Mri.study_id == study_uid).update({'slice': n_slices})
        db.session.commit()

def unzip_file(zip_src, dst_dir):
    """Extract every member of the zip archive *zip_src* into *dst_dir*.

    Returns *dst_dir* so callers can walk the extracted tree.
    Raises zipfile.BadZipFile if *zip_src* is not a valid archive.
    """
    # Context manager fixes the original resource leak: the ZipFile was
    # opened but never closed.
    with zipfile.ZipFile(zip_src, 'r') as archive:
        archive.extractall(dst_dir)
    return dst_dir
        
def UploadFile(path):
    """Read one file; if it is a DICOM slice of an accepted series type,
    upload it to Orthanc via UploadBuffer.

    Returns:
        6000 when the file is not a readable DICOM file (sentinel kept
        for callers), None when the series is filtered out, otherwise
        UploadBuffer's result (the StudyInstanceUID, or None on upload
        failure).
    """
    try:
        # dcmread replaces the deprecated pydicom.read_file alias
        # (removed in pydicom 3.x).
        dcm = pydicom.dcmread(path)
        series = dcm[0x0008, 0x103E].value  # SeriesDescription
    except Exception:
        # Not a DICOM file (or missing SeriesDescription): skip it.
        print(' not a valid DICOM file, ignoring it')
        return 6000

    # Accept t2 tra_dark-fluid, t1 tra and ADC series, excluding
    # navigator ("daohang") and PosDisp series in every case.
    wanted = (('t2' in series and 'tra_dark-fluid' in series)
              or ('t1' in series and 'tra' in series)
              or ('ADC' in series))
    if not wanted or 'daohang' in series or 'PosDisp' in series:
        return None

    with open(path, 'rb') as f:
        # Renamed from `Study_id`, which shadowed the imported model class.
        study_uid = UploadBuffer(f.read())
    return study_uid
      


# Process a folder tree.
def upload_zip_or_dicom(path):
    """Walk *path* recursively and upload every file found: nested .zip
    archives are handed back to upload_fun (recursion), everything else
    goes through UploadFile.

    Returns the StudyInstanceUID of the last successful upload, or None
    when nothing was uploaded.  (BUG FIX: the original left `Study_id`
    unbound in that case and raised UnboundLocalError at `return`.)
    """
    last_uid = None
    for root, dirs, files in os.walk(path):
        for fname in files:
            full = os.path.join(root, fname)
            if os.path.splitext(fname)[1] == ".zip":
                # A zip inside the tree: recurse through upload_fun so
                # nested archives are unpacked the same way as top-level ones.
                uid = upload_fun(full)
            else:
                uid = UploadFile(full)
                if uid == 6000:  # sentinel: not a DICOM file — ignore
                    uid = None
            if uid is not None:
                last_uid = uid
    return last_uid
  
def upload_fun(path):
    """Upload *path* to Orthanc, dispatching on its type: a single file,
    a .zip archive (unpacked next to itself first), or a directory.

    Returns the last uploaded StudyInstanceUID; 6000 is passed through
    from UploadFile for a single non-DICOM file (original behavior kept);
    None when nothing was uploaded.  (BUG FIX: the original left
    `Study_id` unbound — e.g. for an empty directory — and raised
    UnboundLocalError at `return`.)
    """
    result = None
    if os.path.isfile(path):
        extension = os.path.splitext(path)[1]
        if extension == '.zip':
            # Unpack into a sibling folder of the same name, then walk it.
            extracted = unzip_file(zip_src=path, dst_dir=path.replace(".zip", ""))
            result = upload_zip_or_dicom(extracted)
        else:
            # Single file: only zip or plain DICOM is supported; rar/tar
            # archives and info files come back as 6000 and are passed on.
            uid = UploadFile(path)
            if uid is not None:
                result = uid
    elif os.path.isdir(path):
        if os.listdir(path):
            result = upload_zip_or_dicom(path)
        else:
            print("当前文件夹为空！！！")
    return result
    
def upload2orthanc(path):
    """Upload everything under *path* to Orthanc, then refresh the
    per-study slice counts in the database.  Returns upload_fun's result."""
    uid = upload_fun(path)
    # Push instance counts for the studies uploaded in this session.
    upload_slice()
    return uid
 
if __name__ == '__main__':
    # Manual smoke test: upload a single patient folder.
    state = upload2orthanc(
        path=r"C:\Users\AMYGDALA\Desktop\data\Grade_2_invasion\Grade_2_invasion\QIN_LIAN_DONG")