# -*- coding: utf-8 -*-
from __future__ import print_function  # do not delete this line if you want to save your log file.
import os
from naie.context import Context
import moxing as mox
import shutil
import zipfile
import requests
from naie.datasets import get_data_reference
from naie.context import Context as context

def load_data():
    """Load the training and validation sets (template stub).

    Fill this in using the softcomai data-reference API, e.g.::

        from naie.datasets import get_data_reference
        data_reference = get_data_reference(dataset="any_dataset",
                                            dataset_entity="entity_of_dataset")
        df = data_reference.to_pandas_dataframe()
        # or: file_paths = data_reference.get_files_paths()

    Returns
    -------
    tuple
        (x_train, y_train, x_validation, y_validation) — all ``None``
        until the stub is implemented.
    """
    # Placeholder values; replace with real dataset loading.
    x_train, y_train = None, None
    x_validation, y_validation = None, None
    return x_train, y_train, x_validation, y_validation


def model_fn():
    """Construct and return the model object (template stub).

    Example implementation::

        model = RFC(
            n_estimators=int(n_estimators),
            min_samples_split=int(min_samples_split),
            max_features=max(min(max_features, 0.999), 1e-3),
            random_state=2,
        )

    Returns
    -------
    object or None
        The constructed model; ``None`` until the stub is implemented.
    """
    model = None  # replace with a real estimator as in the example above
    return model


def train(x_train, y_train, model):
    """Run the main training loop (template stub).

    Example implementation::

        model.fit(x_train, y_train)

    Parameters
    ----------
    x_train, y_train : training features and labels.
    model : the estimator produced by :func:`model_fn`.
    """
    pass


def save_model(model):
    """Persist *model* to the train job's output path (template stub).

    Example implementations::

        from naie.context import Context
        with open(os.path.join(Context.get_output_path(), 'model.pkl'), 'w') as ff:
            pickle.dump(clf, ff)

        # or, with a TensorFlow Estimator:
        tf.estimator.Estimator(model_dir=Context.get_output_path())

    Parameters
    ----------
    model : the trained model to serialize.
    """
    pass

def un_zip(file_name, dir):
    """Extract every member of the zip archive *file_name* into *dir*.

    Parameters
    ----------
    file_name : str
        Path to the ``.zip`` archive to extract.
    dir : str
        Destination directory (created as needed by ``extractall``).
        NOTE: the name shadows the builtin ``dir``; kept unchanged for
        backward compatibility with existing callers.
    """
    # Context manager guarantees the archive handle is closed even if
    # extraction raises (the original leaked the handle on error), and
    # extractall replaces the hand-rolled per-member extract loop.
    with zipfile.ZipFile(file_name) as zip_file:
        zip_file.extractall(dir)

def score_model(x_validation, y_validation, model):
    """Evaluate *model* on the validation set (template stub).

    Example implementation::

        from naie.metrics import report
        with report(True) as log_report:
            log_report.log_property(
                "score", accuracy_score(y_validation, model.predict(x_validation)))

    Returns
    -------
    object or None
        The evaluation score; ``None`` until the stub is implemented.
    """
    score = None  # replace with a real metric computation
    return score


def _stage_entity(dataset_entity, zip_path, extract_dir):
    """Copy one COCO dataset-entity archive into the local cache, extract it, and delete the zip."""
    ref = get_data_reference(dataset="COCO", dataset_entity=dataset_entity)
    src_path = ref.get_files_paths()[0]
    print("----------------file_path:", src_path)
    mox.file.copy(src_path, zip_path)
    un_zip(zip_path, extract_dir)
    os.remove(zip_path)  # archive no longer needed once extracted


def main():
    """End-to-end training entry point.

    Stages the COCO dataset into ``/cache``, builds the native NMS and
    cocoapi extensions, runs training, exports the checkpoints produced
    under ``/cache`` to the job output path, then runs evaluation.
    """
    import stat

    print('当前目录', os.getcwd())

    # --- stage COCO data into the local cache ---
    _stage_entity("val2017", '/cache/COCO/images/val2017.zip', '/cache/COCO/images/')

    _stage_entity("train20171", '/cache/COCO/images/train1.zip', '/cache/COCO/images/')
    # The first training shard extracts as "train20171"; rename it to the canonical name.
    os.system("mv /cache/COCO/images/train20171 /cache/COCO/images/train2017")

    _stage_entity("train20172", '/cache/COCO/images/train20172.zip', '/cache/COCO/images')

    os.system('ls  /cache/COCO/images')
    os.system('ls -l /cache/COCO/images/train2017 | grep "^-" | wc -l')
    # BUG FIX: os.chmod sets the mode absolutely, so the original's two
    # consecutive calls (S_IRWXO then S_IRWXU) left only the last mask in
    # effect; both masks must be OR-ed together in a single call.
    os.chmod('/cache/COCO/images/train2017', stat.S_IRWXU | stat.S_IRWXO)

    # The original downloaded and extracted "annotations" twice back to back;
    # the redundant second fetch has been removed.
    _stage_entity("annotations", '/cache/COCO/annotations.zip', '/cache/COCO/')
    _stage_entity("person_detection_results",
                  '/cache/COCO/person_detection_results.zip', '/cache/COCO/')

    # --- build native extensions and run training ---
    os.system("pip install -r requirements.txt")
    print('wyj的训练代码正式开始')
    print("获取当前路径：")
    print('ROOT: ', os.getcwd())  # /home/ma-user/work/Algorithm/algoTransPose

    os.chdir("./lib/nms")
    os.system("python setup_linux.py build_ext --inplace")
    print('make之后的当前路径')
    print('make root:', os.getcwd())
    os.chdir("../../")
    print('切换之后的路径')
    print('os.chdir root:', os.getcwd())

    # cocoapi
    os.chdir("./cocoapi/PythonAPI")
    os.system("make install")
    os.system("python setup.py install --user")
    os.chdir("../../")

    os.system("python tools/train.py --cfg experiments/coco/CNT_H/CNT_H_w48_256x192_stage3_1_4_d192_h384_relu_enc4_mh1.yaml")

    # --- export checkpoints produced under /cache to the job output path ---
    root = '/cache/'
    for ckpt in ('final_state.pth', 'checkpoint.pth', 'model_best.pth'):
        dst = os.path.join(Context.get_output_path(), ckpt)
        mox.file.copy(root + ckpt, dst)
        print("save model:", dst)

    os.system("python tools/test.py --cfg experiments/coco/CNT_H/CNT_H_w48_256x192_stage3_1_4_d192_h384_relu_enc4_mh1.yaml TEST.USE_GT_BBOX True")


if __name__ == "__main__":
    main()
