from django.test import TestCase
# from extra_apps.IMAGE_QUERY import extract
from ad_manage.models import Advertisement
# Create your tests here.
from django.http import JsonResponse
import pickle
from celery import task
import datetime
from django.db.models import Q
from extra_apps.IMAGE_QUERY.extract import extract_feat
from image_query.maxpool_conv_5 import ext_pre
from keras import backend as k
from skimage.measure import block_reduce
import numpy as np
import os

from extra_apps.IMAGE_QUERY.predict import predict

import logging

# Cap TensorFlow at 2 CPU devices and 2 intra-/inter-op threads so this web
# worker does not monopolize the host, then install the session as the Keras
# default so extract_feat/predict below run under this limit.
# NOTE(review): `k.tf` works on TF 1.x Keras backends only — confirm pinned
# versions before upgrading.
conf = k.tf.ConfigProto(device_count={'CPU': 2},
                        intra_op_parallelism_threads=2,
                        inter_op_parallelism_threads=2)
k.set_session(k.tf.Session(config=conf))




def get_feature(request):
    """Return the unpickled image features of up to 1000 advertisements as JSON.

    Ads with a placeholder ('暂无') or empty ``local_img`` are excluded, as are
    rows whose ``image_feature`` blob is missing or empty.

    :param request: Django HTTP request (unused beyond routing).
    :return: ``JsonResponse`` with key ``json_data`` holding a list of features.
    """
    advertisements = Advertisement.objects.exclude(local_img='暂无').exclude(local_img='')[0:1000]
    # NOTE(review): pickle.loads is only safe because these blobs are written
    # exclusively by save_feature/save_feature_test in this app — never store
    # externally supplied data in image_feature.
    feature_list = [
        pickle.loads(ad.image_feature)
        for ad in advertisements
        if ad.image_feature is not None and len(ad.image_feature) > 0
    ]
    return JsonResponse({'json_data': feature_list})


@task
def save_feature():
    """Celery task: compute and persist the image feature vector for every
    advertisement that has a local image but whose ``image_feature`` is still
    NULL.

    Features come from ``extract_feat`` and are pickled into the
    ``image_feature`` field. Failures on individual rows are logged and
    skipped so one bad image cannot abort the whole batch.
    """
    # exclude(image_feature__isnull=False) keeps only rows with a NULL feature.
    advertisements = Advertisement.objects.exclude(local_img='暂无').exclude(local_img='').exclude(
        image_feature__isnull=False)
    print("start")
    print(advertisements.count())
    for advertisement in advertisements:
        try:
            img_path = os.path.join('/repo/smspider/images',
                                    advertisement.model_from,
                                    advertisement.local_img)
            # Skip missing files up front (consistent with save_feature_test)
            # instead of paying for an exception inside extract_feat.
            if not os.path.exists(img_path):
                print("null image:" + img_path)
                continue
            feat = extract_feat(img_path)
            advertisement.image_feature = pickle.dumps(feat.tolist())
            advertisement.save()
        except Exception as e:
            # Best-effort batch job: report and move on to the next row.
            print(e)
    print("end")




@task
def save_tag():
    """Celery task: predict and store the top-10 image tags for every
    'radar_app' advertisement that has a local image.

    Tags land in the model fields ``image_tag_1`` .. ``image_tag_10``.
    Per-row failures are printed and skipped.
    """
    logging.info("save_tag is started" + str(datetime.datetime.now()))
    advertisements = Advertisement.objects.exclude(local_img='暂无').exclude(local_img='').filter(
        Q(model_from='radar_app'))
    for advertisement in advertisements:
        image_path = '/repo/smspider/images/' + advertisement.model_from + '/' + advertisement.local_img
        try:
            la = predict(image_path)
            # BUG FIX: the model fields are image_tag_1..image_tag_10 (see
            # save_tag_test and the image_tag_1 filter there). The old loop
            # used index i, setting a non-existent image_tag_0 attribute and
            # never storing the 10th tag.
            for i in range(10):
                setattr(advertisement, 'image_tag_{}'.format(i + 1), la[i])
            advertisement.save()
        except Exception as e:
            print(e)
    print("save_tag is end")


# 提取图片标签核心代码 #
def save_tag_test(request):
    # image_path = 'C:/Users/Administrator/Desktop/test.jpg'
    # la = predict(image_path)
    logging.info("save_tag is started")
    advertisements = Advertisement.objects.exclude(local_img='暂无').exclude(local_img='').filter(image_tag_1='暂无')[40000:]
    import pathlib
    for i in range(advertisements.count()):
        image_path = '/repo/smspider/images/' + advertisements[i].model_from + '/' + advertisements[i].local_img
        path = pathlib.Path(image_path)
        if not path.exists():
            logging.info("null image:" + image_path)
            continue
        logging.info(image_path)
        try:
            la = predict(image_path)
            advertisement = advertisements[i]
            for j in range(10):
                # 用format封装image_tag_1~10，并赋值，省去for循环#
                s = '{}{}'.format('image_tag_', j + 1)
                setattr(advertisement, s, la[j])
            advertisement.save()
        except Exception as e:
            nowTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')  # 现在
            print(nowTime)
            print("save_tag_test Exception-->")


            print(e)
    return JsonResponse({'json_data': "success"})


# 提取图片标签核心代码 #
# def save_feature_test(request):
#
#     print("save_feature_test is started" + str(datetime.datetime.now()))
#     advertisements = Advertisement.objects.exclude(local_img='暂无').exclude(local_img='').filter(
#         Q(image_feature__isnull=False)|~Q(image_tag_1= '暂无'))[0:1000]
#
#     for ads in advertisements:
#             img_path = '/repo/smspider/images/' + ads.model_from + '/' + ads.local_img
#             if not os.path.exists(img_path):
#                 logging.info("null image:" + img_path)
#                 continue
#             ad = ads
#             labels, percent, norm_feat = ext_pre(img_path)
#             img_feat = norm_feat.tolist()
#             ad.image_feature = pickle.dumps(img_feat) # 赋值特征码 #
#             for j in range(10):
#                 # 用format封装image_tag_1~10，并赋值，省去for循环#
#                 s = '{}{}'.format('image_tag_', j + 1)
#                 setattr(ad, s, labels[j])           # 赋值标签 #
#             ad.save()
#     return JsonResponse({'json_data': advertisements.count()})

def save_feature_test(request):
    """Extract and store image features for up to 40000 ads missing one.

    Iterates the queryset directly instead of the original
    ``range(count())`` + ``qs[i]`` pattern, which executed one SQL query per
    index access and re-evaluated the shrinking ``image_feature__isnull``
    filter as rows were saved.

    :param request: Django HTTP request (unused beyond routing).
    :return: ``JsonResponse`` whose ``json_data`` is the count of ads whose
        image file was missing on disk.
    """
    logging.info("save_feature_test is started" + str(datetime.datetime.now()))
    advertisements = Advertisement.objects.exclude(local_img='暂无').exclude(local_img='').filter(
        image_feature__isnull=True)[0:40000]
    err_img = 0
    for advertisement in advertisements:
        try:
            img_path = '/repo/smspider/images/' + advertisement.model_from + '/' + advertisement.local_img
            if not os.path.exists(img_path):
                print("null image:" + img_path)
                err_img += 1
                continue
            feat = extract_feat(img_path)
            advertisement.image_feature = pickle.dumps(feat.tolist())
            advertisement.save()
        except Exception as e:
            # Best-effort batch: report and continue with the next row.
            print(e)
    return JsonResponse({'json_data': err_img})




# 将特征码保存到npy文件 #
@task
def save_feature_npy(request):
    """Celery task: dump every stored image feature into one ``.npy`` file.

    Fixes vs. the original implementation:
      * the remainder rows (``count % 1000``) were computed into ``r`` but
        never processed, silently dropping up to 999 features;
      * every 1000-row page overwrote the same file, so only the last page
        survived — features are now accumulated and saved once;
      * each blob was unpickled twice and the reload into ``data`` was dead
        code.

    :param request: unused (kept for interface compatibility).
    """
    features = Advertisement.objects.all().values_list("image_feature")
    page_size = 1000
    total = features.count()
    all_feats = []
    # Page through the queryset to bound memory used by the DB cursor.
    for start in range(0, total, page_size):
        for (blob,) in features[start:start + page_size]:
            # NOTE(review): pickle.loads assumes blobs were written by this
            # app's save_feature* functions only.
            all_feats.append(pickle.loads(blob))
    np.save('/repo/smspider/images/feature_arr.npy', np.array(all_feats))
