from flask import Flask
from flask_restful import reqparse, abort, Api, Resource
from threading import Thread
from mysql_config import db_config
from pymysqlpool import ConnectionPool
from PIL import Image
from io import BytesIO
import requests
import time
import pymysql
import logging
from urllib.parse import quote
import redis
import uuid
import base64
import pickle

# Flask application + flask-restful API wrapper for the similarity endpoints.
app = Flask(__name__)
api = Api(app)

# Error log: everything emitted through app.logger goes to app.error.log.
handler = logging.FileHandler('app.error.log', encoding='UTF-8')
# handler.setLevel(logging.DEBUG)
logging_format = logging.Formatter(
    '%(asctime)s - %(levelname)s -%(pathname)s- %(filename)s - %(funcName)s - %(lineno)s - %(message)s')
handler.setFormatter(logging_format)
app.logger.setLevel(logging.DEBUG)
app.logger.addHandler(handler)

# Info log: request/response audit trail.  The date in the file name is fixed
# at process start — the file does NOT roll over while the process runs.
infologger = logging.getLogger("app.info.log")
infologger.setLevel(level=logging.INFO)
log_file_name = 'app-info.' + time.strftime('%Y-%m-%d', time.localtime(time.time())) + '.log'
infohandler=logging.FileHandler(log_file_name, encoding='UTF-8')
infohandler.setFormatter(logging_format)
infologger.addHandler(infohandler)

# Shared base parser; each resource copies it and adds its own arguments.
parser = reqparse.RequestParser()

# Redis used to cache downscaled query images for the feature service.
# NOTE(review): credentials are hardcoded in source — move to config/env.
cache_redis_config = {'host': 'r-2ze5f756d93811a4.redis.rds.aliyuncs.com', 'password': 'iAIhSc26', 'port': 6379, 'decode_responses': True}
cache_redis_pool = None  # lazily created by getCacheRedisPool()
cache_server = 'http://172.16.237.6/image/'  # serves images cached in redis by key

# Feature-extraction service: GET <base><image_url> returns a feature vector.
_feature_base_url = 'http://172.16.237.71/getfeatures?ossurl='
# _feature_base_url = 'http://172.16.237.116/getfeatures?ossurl='
# OSS base urls prepended to the relative paths returned by the per-source lookups.
_veer_image_base_url = 'http://bj-feiyuantu.oss-cn-beijing.aliyuncs.com/'
_vcg_image_base_url = 'http://bj-feiyuantu.oss-cn-beijing.aliyuncs.com/'
# _quanjing_image_base_url = 'http://elephant-spider.oss-cn-beijing.aliyuncs.com/'
_quanjing_image_base_url = 'http://elephant-spider.vpc100-oss-cn-beijing.aliyuncs.com/'
# SURF re-scoring service used to refine the n2 candidate list.
# _surf_uri = 'http://127.0.0.1:5001/surf/getscore'
# _surf_uri = 'http://172.16.237.73/surf/getscore'
_surf_uri = 'http://172.16.237.124/surf/getscore'

# n2 ANN index hosts as (engine_category, ip, port) tuples.  Only the
# uncommented entries at the bottom are queried; the commented groups appear
# to be retired deployments kept for reference.
hosts = [

    # for tencent
    #('vcg_editor', "172.16.237.32", "8080"),
    #('vcg_editor', "172.16.237.34", "8080"),
    #('vcg_editor', "172.16.237.35", "8080"),
    #('vcg_creative', "172.16.237.36", "8080"),
    #('vcg_creative', "172.16.237.40", "8080"),
    #('veer', "172.16.237.42", "8080"),
    #('veer', "172.16.237.43", "8080"),
    #('veer', "172.16.237.46", "8080"),
    #('veer', "172.16.237.47", "8080"),

    # for tencent
    #('vcg_editor', "172.16.237.136", "80"),
    #('vcg_editor', "172.16.237.135", "80"),
    #('vcg_editor', "172.16.237.134", "80"),
    #('vcg_creative', "172.16.237.133", "80"),
    #('vcg_creative', "172.16.237.132", "80"),
    #('veer', "172.16.237.69", "80"),
    #('veer', "172.16.237.50", "80"),
    #('veer', "172.16.237.49", "80"),
    #('veer', "172.16.237.48", "80"),

    # for tencent
    #('vcg_editor', "172.16.237.246", "80"),
    #('vcg_editor', "172.16.237.247", "80"),
    #('vcg_editor', "172.16.237.249", "80"),
    #('vcg_creative', "172.16.237.250", "80"),
    #('vcg_creative', "172.16.237.251", "80"),
    #('veer', "172.16.237.252", "80"),
    #('veer', "172.16.237.1", "80"),
    #('veer', "172.16.237.2", "80"),
    #('veer', "172.16.237.3", "80"),

    # for tencent
    # ('vcg_editor', "172.16.237.32", "80"),
    # ('vcg_editor', "172.16.237.34", "80"),
    # ('vcg_editor', "172.16.237.35", "80"),
    # ('vcg_creative', "172.16.237.36", "80"),
    # ('vcg_creative', "172.16.237.40", "80"),
    # ('veer', "172.16.237.42", "80"),
    # ('veer', "172.16.237.43", "80"),
    # ('veer', "172.16.237.46", "80"),
    # ('veer', "172.16.237.47", "80"),

    # for eagle
    # ('vcg_editor', "172.16.237.99", "80"),
    # ('vcg_editor', "172.16.237.98", "80"),
    # ('vcg_editor', "172.16.237.97", "80"),
    # ('vcg_creative', "172.16.237.96", "80"),
    # ('vcg_creative', "172.16.237.95", "80"),
    # ('veer', "172.16.237.94", "80"),
    # ('veer', "172.16.237.93", "80"),
    # ('veer', "172.16.237.92", "80"),
    # ('veer', "172.16.237.91", "80"),
    # ('500px', "172.16.237.90", "80"),
    # ('500px', "172.16.237.89", "80"),
    ('quanjing', "172.16.237.88", "80"),
    ('quanjing', "172.16.237.78", "80")
]


class myThread(Thread):
    """Worker thread that fetches one n2 ANN endpoint via work().

    The response is stored under this thread's name so the caller can tell
    which engine/host produced it after joining.
    """

    def __init__(self, name, url):
        super().__init__()
        self.result = {}
        self.name = name
        self.url = url

    def run(self):
        # Executed on the worker thread; work() never raises, it returns
        # '' on failure.
        self.result[self.name] = work(self.url)

    def get_result(self):
        """Return the {thread_name: response} mapping.

        Only meaningful after join(); before run() completes the mapping
        may still be empty.
        """
        try:
            return self.result
        except Exception as e:
            app.logger.error(e)
            return None


def work(url):
    """GET one n2 ANN url and return its parsed JSON body.

    Returns '' (empty string) on any failure — non-200 status, network
    error, or unparsable JSON — after logging the problem.
    """
    try:
        response = requests.get(url, timeout=5)
        if response.status_code != 200:
            app.logger.error('有一个n2服务异常，返回结果为：' + response.text)
            return ''
        return response.json()
    except Exception as e:
        app.logger.error('有一个n2服务挂了，详情如下：')
        app.logger.error(e)
        # app.logger.error(url)
        return ''


def get_n2_results_by_multithreading(engine_list, vector, n2_size):
    """Query every configured n2 host whose category is selected, in parallel.

    engine_list: list of category names ('all' selects every host).
    vector: feature vector string passed straight into the query string.
    n2_size: requested candidate count, already stringified by the caller.
    Returns a list of {thread_name: response} dicts, one per queried host.
    """
    query = 'ann?vector=' + vector + '&input_k=' + n2_size
    workers = []
    for category, host, port in hosts:
        if category not in engine_list and 'all' not in engine_list:
            continue
        endpoint = "http://" + host + ":" + port + "/" + query
        worker = myThread(category + "-" + host, endpoint)
        worker.start()
        workers.append(worker)

    gathered = []
    for worker in workers:
        worker.join()
        gathered.append(worker.get_result())
    return gathered


def parseResult(results):
    """Flatten raw n2 responses into a list of {'from', 'id', 'score'} dicts.

    results: list of {thread_name: response} dicts from
    get_n2_results_by_multithreading(); failed fetches are '' and skipped.
    Thread names look like "<engine>-<host>", so the part before the first
    '-' is the source engine.  Never raises; returns what it managed to
    parse (possibly an empty list).

    Fix: the error-log branches concatenated the response dict `v` directly
    into a str, which raised TypeError inside the logging code itself (and
    the outer logger call passed `results` as a stray %-arg) — both now use
    explicit str() conversion.
    """
    res = []
    try:
        for aresult in results or []:
            if not aresult:
                # '' from a dead host, or an empty dict — nothing to parse.
                continue
            for (k, v) in aresult.items():
                try:
                    vfrom = k.split('-')[0]
                    if 'status' in v.keys() and v['status'] == '200':
                        for kk, vv in v['ids'].items():
                            # vv is (image_id, score); skip empty ids.
                            if vv[0]:
                                res.append({'from': vfrom, 'id': vv[0], 'score': vv[1]})
                    else:
                        app.logger.error('部分 解析N2服务器错误：' + k + '服务器返回：' + str(v))
                except Exception as e:
                    app.logger.error('部分 解析N2服务器错误：' + k + '服务器返回：' + str(v))
                    app.logger.error(e)
                    continue
        return res
    except Exception as e:
        app.logger.error('解析 N2服务器错误：' + str(results))
        app.logger.error(e)
        return res


# One pool per db_config entry, created lazily and reused across requests.
_pools = {}


def connection_pool(pool_name):
    """Return the shared ConnectionPool for the named db_config entry.

    Fix: the original constructed and connected a brand-new pool on every
    call, which defeats the purpose of pooling and accumulates connections;
    pools are now created once per name and cached.
    """
    pool = _pools.get(pool_name)
    if pool is None:
        pool = ConnectionPool(**db_config[pool_name])
        pool.connect()
        _pools[pool_name] = pool
    return pool


def getquanjingres(ids):
    """Fetch {QJ_id, oss_url} rows for the given quanjing ids.

    Returns the row list ([] for empty input), or None when the query fails
    (callers' parse step treats None as "no data").

    Fix: the id list was spliced into the SQL string with hand-rolled
    quoting; it is now passed through pymysql's parameter substitution.
    """
    if not ids:
        return []
    try:
        with connection_pool('qungjing').cursor() as cursor:
            placeholders = ','.join(['%s'] * len(ids))
            sql = "SELECT `QJ_id`,`oss_url` FROM `quanjing` WHERE `QJ_id` in ({ids})".format(
                ids=placeholders
            )
            cursor.execute(sql, list(ids))
            return cursor.fetchall()
    except Exception as e:
        app.logger.error('quanjing DB error：')
        app.logger.error(e)


def parsequanjingres(data):
    """Map quanjing rows to {str(QJ_id): oss_url}, skipping urls of len <= 5.

    Fix: `data` is None when the DB lookup failed; the original relied on
    len(None) raising to reach the except branch — now handled explicitly
    so a failed lookup no longer logs a spurious parse error.
    """
    result = {}
    try:
        if not data:
            return result
        for one in data:
            if len(one['oss_url']) > 5:
                result[str(one['QJ_id'])] = one['oss_url']
        return result
    except Exception as e:
        app.logger.error("解析 quanjing 资源服务结果 错误")
        app.logger.error(e)
        return result


def get500pxres(ids):
    """Fetch {photo_id, url} rows for the given 500px photo ids.

    Returns the row list ([] for empty input), or None when the query fails.

    Fix: the id list was spliced into the SQL string with hand-rolled
    quoting; it is now passed through pymysql's parameter substitution.
    """
    if not ids:
        return []
    try:
        with connection_pool('500px_db').cursor() as cursor:
            placeholders = ','.join(['%s'] * len(ids))
            sql = "SELECT `photo_id`,`url` FROM `resource_oss_link` WHERE `photo_id` in ({ids})".format(
                ids=placeholders
            )
            cursor.execute(sql, list(ids))
            return cursor.fetchall()
    except Exception as e:
        app.logger.error('500px_db DB error：')
        app.logger.error(e)


def parse500pxres(data):
    """Map 500px rows to {str(photo_id): url}, skipping urls of len <= 5.

    Fix: handles data=None (failed DB lookup) explicitly instead of relying
    on the exception path.
    """
    result = {}
    try:
        if not data:
            return result
        for one in data:
            if len(one['url']) > 5:
                result[str(one['photo_id'])] = one['url']
        return result
    except Exception as e:
        app.logger.error("解析 500px 资源服务结果 错误")
        app.logger.error(e)
        return result


def getvcgres(ids):
    """Fetch {id, oss_800, oss_400} rows for the given vcg image ids.

    Returns the row list ([] for empty input), or None when the query fails.

    Fix: the id list was joined straight into the SQL text; it is now
    passed through pymysql's parameter substitution.
    """
    if not ids:
        return []
    try:
        with connection_pool('res_image').cursor() as cursor:
            placeholders = ','.join(['%s'] * len(ids))
            sql = "SELECT id,oss_800,oss_400 FROM `res_image` WHERE id in ({ids}) ORDER BY id ASC ".format(
                ids=placeholders
            )
            cursor.execute(sql, list(ids))
            return cursor.fetchall()
    except Exception as e:
        app.logger.error('res_image DB error：')
        app.logger.error(e)


def parsevcgres(data):
    """Map vcg rows to {str(id): oss path}, preferring oss_400 over oss_800.

    A row's oss_400 is used when its length exceeds 5, otherwise oss_800.

    Fix: handles data=None (failed DB lookup) explicitly instead of relying
    on the exception path.
    """
    result = {}
    try:
        if not data:
            return result
        for one in data:
            if len(one['oss_400']) > 5:
                result[str(one['id'])] = one['oss_400']
            else:
                result[str(one['id'])] = one['oss_800']
        return result
    except Exception as e:
        app.logger.error("解析 vcg 资源服务结果 错误")
        app.logger.error(e)
        return result


def getveerres(ids):
    """Fetch veer image metadata for the given id strings.

    Returns the service's parsed JSON payload, or False on any failure.
    """
    try:
        joined = ','.join(ids)
        response = requests.get("http://veerservice.veer.com/indexing/imageViewByIds?ids=" + joined, timeout=5)
        return response.json()
    except Exception as e:
        app.logger.error("获取 veer 资源服务错误")
        app.logger.error(e)
        return False


def parseveerres(data):
    """Map the veer service payload to {str(id): oss path}.

    Prefers oss400 (when its length exceeds 5) over oss800.

    Fix: getveerres() returns False on failure; the original reached the
    except branch via data['data'] raising — now handled explicitly so a
    failed fetch no longer logs a spurious parse error.
    """
    result = {}
    try:
        if not data or not data.get('data'):
            return result
        for one in data['data']:
            if len(one['oss400']) > 5:
                result[str(one['id'])] = one['oss400']
            else:
                result[str(one['id'])] = one['oss800']
        return result
    except Exception as e:
        app.logger.error("解析 veer 资源服务结果 错误")
        app.logger.error(e)
        return result


def resparse(res):
    """Attach an 'img_url' to every n2 hit by resolving ids per source.

    Hits are bucketed by their 'from' engine (anything that is not veer,
    500px or quanjing counts as vcg), each bucket is resolved through its
    source-specific lookup, and the input dicts are annotated in place with
    the resulting url ('' when no url was found).  Returns the hit list.
    """
    buckets = {'veer': [], '500px': [], 'quanjing': [], 'vcg': []}
    for hit in res:
        source = hit['from']
        if source not in ('veer', '500px', 'quanjing'):
            source = 'vcg'
        buckets[source].append(hit['id'])

    veer_urls = parseveerres(getveerres(buckets['veer'])) if buckets['veer'] else {}
    px_urls = parse500pxres(get500pxres(buckets['500px'])) if buckets['500px'] else {}
    quanjing_urls = parsequanjingres(getquanjingres(buckets['quanjing'])) if buckets['quanjing'] else {}
    vcg_urls = parsevcgres(getvcgres(buckets['vcg'])) if buckets['vcg'] else {}

    enriched = []
    for hit in res:
        hid = hit['id']
        # Lookup precedence: veer, then 500px, then quanjing, then vcg.
        if hid in veer_urls:
            hit['img_url'] = _veer_image_base_url + veer_urls[hid]
        elif hid in px_urls:
            # 500px rows already carry a full url — no base prefix.
            hit['img_url'] = px_urls[hid]
        elif hid in quanjing_urls:
            hit['img_url'] = _quanjing_image_base_url + quanjing_urls[hid]
        elif hid in vcg_urls:
            hit['img_url'] = _vcg_image_base_url + vcg_urls[hid]
        else:
            hit['img_url'] = ''
        enriched.append(hit)

    return enriched


def filterOfflineImages(res):
    """Keep only hits whose image the vcg resource service still reports online.

    Best-effort: on any API failure the input list is returned unfiltered.

    Fixes: errors were print()ed instead of logged like the rest of the
    file; online-id membership was an O(n*m) list scan (now a set); dead
    `pass` removed.
    """
    try:
        headers = {'Content-Type': 'application/json'}
        imgIds = [x['id'] for x in res]
        r = requests.post('http://resourceservice.vcg.com/resImage/listByIds', headers=headers, json=imgIds, timeout=5)
        if r.status_code == 200:
            payload = r.json()
            if payload['data']:
                online = {str(x['id']) for x in payload['data']}
                res = [x for x in res if x['id'] in online]
    except Exception as e:
        app.logger.error('API ERROR ：http://resourceservice.vcg.com/resImage/listByIds')
        app.logger.error(e)

    return res


def getCacheRedisPool():
    """Lazily create and return the module-wide redis connection pool."""
    global cache_redis_pool
    if cache_redis_pool is None:
        cache_redis_pool = redis.ConnectionPool(**cache_redis_config)
    return cache_redis_pool


def cacheImage(key, val, ex=60):
    """Store val under key in the cache redis with a TTL of ex seconds.

    Returns the result of the redis SET command.
    """
    client = redis.Redis(connection_pool=getCacheRedisPool())
    return client.set(key, value=val, ex=ex)


# Pirate classifier, loaded once at import time; used by getclf().
clf = None

# NOTE(review): pickle.load is only safe because ./model.pcl is assumed to be
# a trusted artifact deployed with the service — never point this at
# untrusted input.
with open('./model.pcl', 'rb') as f:
    clf = pickle.load(f)


def getclf(n2_value, surf_value):
    """Run the pickled classifier on one (n2_score, surf_score) pair.

    Returns the predicted label coerced to int.
    """
    prediction = clf.predict([[n2_value, surf_value]])
    return int(prediction[0])


def doclf(x):
    """Set x['is_pirate'] in place from the classifier and return x."""
    label = getclf(x['n2_score'], x['surf_score'])
    x['is_pirate'] = label
    return x


class CopyrightAnalyzer(Resource):
    """v1 endpoint (/v1/api/getsimilarimages): find near-duplicates of an
    image and flag likely piracy.

    Pipeline: download + validate the image -> cache a <=299px thumbnail in
    redis for the feature service -> extract a feature vector -> query the
    n2 ANN hosts in parallel -> optionally re-score candidates with the SURF
    service -> optionally run the pickled classifier for 'is_pirate'.
    All failures return HTTP 200 with an application-level code 500 payload.
    """

    def get(self):
        # ---- argument parsing and defaulting ----------------------------
        myparser = parser.copy()
        myparser.add_argument('img_url', type=str, required=True)
        myparser.add_argument('engine', type=str)
        myparser.add_argument('n2_size', type=int)
        myparser.add_argument('result_size', type=int)
        myparser.add_argument('img_size', type=int)
        myparser.add_argument('surf_enable', type=int)
        myparser.add_argument('model_enable', type=int)
        myparser.add_argument('verify_img', type=int)
        myparser.add_argument('username', type=str)
        args = myparser.parse_args()
        if args['engine'] is None:
            args['engine'] = 'vcg_editor,vcg_creative,veer'
        # Sizes outside (0, 100] are clamped back to 1.
        if args['n2_size'] is None:
            args['n2_size'] = 1
        elif args['n2_size'] <= 0 or args['n2_size'] > 100:
            args['n2_size'] = 1
        if args['result_size'] is None:
            args['result_size'] = 1
        elif args['result_size'] <= 0 or args['result_size'] > 100:
            args['result_size'] = 1
        if args['img_size'] is None:
            args['img_size'] = 641
        if args['surf_enable'] is None:
            args['surf_enable'] = 1
        if args['model_enable'] is None:
            args['model_enable'] = 1
        if args['verify_img'] is None:
            args['verify_img'] = 1
        if args['username'] is None:
            args['username'] = 'unknown'

        final_result = {
            'code': 200,
            'data': [],
            'msg': 'OK'
        }

        # ---- download the query image and (optionally) validate it ------
        try:
            r = requests.get(args['img_url'], timeout=10)
            if r.status_code != 200:
                app.logger.error("img_url 访问错误：" + args['img_url'] + str(r.status_code))
                final_result['code'] = 500
                final_result['msg'] = 'img_url: ' + args['img_url'] + ' 服务器无法访问该图片 请核实！'
                return final_result, 200
            else:

                im = Image.open(BytesIO(r.content))
                img_format = str(im.format).lower()

                try:

                    if args['verify_img'] == 1:
                        # Reject images that are too large, too small, too
                        # elongated, or not jpeg/png.
                        w, h = im.size
                        max_v = max(w, h)
                        min_v = min(w, h)
                        if min_v > args['img_size']:
                            app.logger.error("图片尺寸不合格：[" + str(w) + "," + str(h) + "]" + args['img_url'])
                            final_result['code'] = 500
                            final_result['msg'] = "图片尺寸不合格，宽度应小于等于640px"
                            return final_result, 200
                        if w*h < 2205 or min_v/max_v < 0.3062:
                            app.logger.error("图片尺寸不合格：[" + str(w) + "," + str(h) + "]" + args['img_url'])
                            final_result['code'] = 500
                            final_result['msg'] = "图片尺寸不合格，w*h < 2205 or w/h < 0.3062"
                            return final_result, 200
                        img_format = str(im.format).lower()
                        if img_format not in ['jpeg', 'png']:
                            app.logger.error("图片格式不正确：[" + img_format + "] " + args['img_url'])
                            final_result['code'] = 500
                            final_result['msg'] = "图片格式不正确：[" + img_format + "], 仅支持格式：jpg、png"
                            return final_result, 200

                    # cache image: downscale to <=299px, re-encode, and put a
                    # base64 copy in redis so the feature service can fetch it
                    # from cache_server by uuid key.
                    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10
                    # (alias of LANCZOS since 9.1) — use Image.LANCZOS when
                    # upgrading Pillow.
                    im.thumbnail((299, 299), Image.ANTIALIAS)
                    buf = BytesIO()
                    im.save(buf, format=img_format, quality=75)
                    buf.seek(0)
                    imgb64str = base64.b64encode(buf.getvalue())
                    img_k = str(uuid.uuid1())
                    # One retry on cache failure; fall back to the original
                    # (url-quoted) image url if redis refuses both attempts.
                    if cacheImage(img_k, imgb64str, ex=60) is True:
                        img_url = cache_server + img_k
                    else:
                        if cacheImage(img_k, imgb64str, ex=60) is True:
                            img_url = cache_server + img_k
                        else:
                            app.logger.error("redis缓存图片失败：" + args['img_url'])
                            img_url = quote(args['img_url'])
                except Exception as e:
                    app.logger.error(e)
                    final_result['code'] = 500
                    final_result['msg'] = '服务异常 请核实 img_url: ' + args['img_url']
                    return final_result, 200

        except Exception as e:
            app.logger.error("图片校验异常：" + args['img_url'])
            app.logger.error(e)
            final_result['code'] = 500
            final_result['msg'] = '服务异常 请核实 img_url: ' + args['img_url']
            return final_result, 200

        # ---- feature extraction -----------------------------------------
        try:
            print(img_url)
            r = requests.get(_feature_base_url + img_url, timeout=10)
            if r.status_code != 200:
                app.logger.error("feature服务器误  " + str(r.status_code) + ' ' + args['img_url'])
                final_result['code'] = 500
                final_result['msg'] = "feature服务错误，请联系管理员"
                return final_result, 200
            # The raw response body IS the vector string.
            vector = r.text
        except Exception as e:
            app.logger.error("feature服务错误：" + _feature_base_url + args['img_url'])
            app.logger.error(e)
            final_result['code'] = 500
            final_result['msg'] = "feature服务错误，请联系管理员"
            return final_result, 200

        # ---- parallel n2 ANN search -------------------------------------
        engine_list = args['engine'].split(',')
        n2_size = str(args['n2_size'])
        n2_results = get_n2_results_by_multithreading(engine_list, vector, n2_size)

        if len(n2_results) == 0:
            app.logger.error("n2 服务器都挂了")
            final_result['code'] = 500
            final_result['msg'] = "n2服务异常，请联系管理员"
            return final_result, 200

        # Sort ascending by n2 score (presumably a distance: lower = more
        # similar — confirm against the n2 service) and truncate.
        pr = parseResult(n2_results)
        res_tmp = sorted(pr, key=lambda x: x['score'])

        result_size = int(args['result_size'])
        res = res_tmp[:result_size]
        if int(args['surf_enable']) == 0:
            # No SURF re-scoring requested: return the raw n2 hits.
            final_result['data'] = res
            return final_result, 200

        # ---- SURF re-scoring --------------------------------------------
        res_result = resparse(res)

        surf_results = []
        try:
            postData = {'origin_url': img_url, 'img_list': []}
            if len(res_result) > 0:
                for result_one in res_result:
                    if result_one['img_url'] and len(result_one['img_url']) > 10:
                        postData['img_list'].append({result_one['id']: result_one['img_url']})

            r = requests.post(_surf_uri, json=postData, timeout=10)
            surf_result = r.json()

            # app.logger.info(surf_result)

            # Candidates the SURF service did not score get surf_score 0.
            for oneres in res_result:
                if oneres['id'] in surf_result:
                    oneres['surf_score'] = surf_result[oneres['id']]
                else:
                    oneres['surf_score'] = 0
                surf_results.append(oneres)

        except Exception as e:
            # SURF service down: degrade to n2 scores only.  Note the n2
            # score is copied into surf_score here, and this early return
            # bypasses the final_result envelope used everywhere else.
            app.logger.error('surf_api 错误：')
            app.logger.error(e)
            ex_result = list(map(lambda x: {'from': x['from'], 'id': x['id'], 'surf_score': x['score'], 'n2_score': x['score'], 'is_pirate': 0}, res))
            # ex_result = list(map(lambda x: {'from': x['from'], 'id': x['id'], 'surf_score': x['score'], 'is_pirate': 0}, res))
            if args['model_enable'] == 1:
                ex_result = list(map(doclf, ex_result))
            return ex_result, 200
            pass  # NOTE(review): unreachable after the return above

        result = sorted(surf_results, key=lambda x: x['surf_score'], reverse=True)

        # first = result[:1]
        # final_result = {'from': first[0]['from'], 'id': first[0]['id'], 'surf_score': first[0]['surf_score']}
        # return final_result, 200

        # ---- piracy decision --------------------------------------------
        # 0.095684803001876 is the empirically tuned SURF threshold; hits
        # at/above it default to is_pirate=1, and only those are re-judged
        # by the classifier when model_enable=1.
        tmp_result = list(
            map(lambda x: {'from': x['from'], 'id': x['id'], 'surf_score': x['surf_score'], 'n2_score': x['score'], 'is_pirate': 1}
            if x['surf_score'] >= 0.095684803001876 else {'from': x['from'], 'id': x['id'],
                                                          'surf_score': x['surf_score'], 'n2_score': x['score'],
                                                          'is_pirate': 0}, result))
        if args['model_enable'] == 1:
            tmp_result = list(map(lambda x: doclf(x) if x['surf_score'] >= 0.095684803001876 else x, tmp_result))

        # Audit trail of the full request/response.
        infologger.info("params:" + str(args) + " response:" + str(tmp_result))

        final_result['data'] = tmp_result
        return final_result, 200


class SearchImageService(Resource):
    """v2 endpoint (/v2/api/getsimilarimages): plain similarity search.

    Unlike v1, the image is passed to the feature service by (quoted) url —
    no redis thumbnail caching — validation and SURF re-scoring are off by
    default, and results can be filtered to online-only images.
    All failures return HTTP 200 with an application-level code 500 payload.
    """

    def get(self):
        # ---- argument parsing and defaulting ----------------------------
        myparser = parser.copy()
        myparser.add_argument('img_url', type=str, required=True)
        myparser.add_argument('engine', type=str)
        myparser.add_argument('n2_size', type=int)
        myparser.add_argument('result_size', type=int)
        myparser.add_argument('surf_enable', type=int)
        myparser.add_argument('verify_img', type=int)
        myparser.add_argument('is_online', type=int)
        myparser.add_argument('from_autotagging', type=int)
        args = myparser.parse_args()
        if args['engine'] is None:
            args['engine'] = 'vcg_editor,vcg_creative,veer,500px'
        # Sizes outside (0, 100] are clamped back to 100 (v1 clamps to 1).
        if args['n2_size'] is None:
            args['n2_size'] = 100
        elif args['n2_size'] <= 0 or args['n2_size'] > 100:
            args['n2_size'] = 100
        if args['result_size'] is None:
            args['result_size'] = 100
        elif args['result_size'] <= 0 or args['result_size'] > 100:
            args['result_size'] = 100
        if args['surf_enable'] is None:
            args['surf_enable'] = 0
        if args['verify_img'] is None:
            args['verify_img'] = 0
        if args['is_online'] is None:
            args['is_online'] = 0
        if args['from_autotagging'] is None:
            args['from_autotagging'] = 0

        final_result = {
            'code': 200,
            'data': {},
            'msg': 'OK'
        }

        # ---- optional image validation (off by default) -----------------
        if args['verify_img'] == 1:
            try:
                r = requests.get(args['img_url'], timeout=10)
                if r.status_code != 200:
                    app.logger.error("img_url 访问错误：" + args['img_url'] + str(r.status_code))
                    final_result['code'] = 500
                    final_result['msg'] = 'img_url: ' + args['img_url'] + ' 服务器无法访问该图片 请核实！'
                    return final_result, 200
                else:
                    im = Image.open(BytesIO(r.content))
                    w, h = im.size
                    max_v = max(w, h)
                    if max_v > 401:
                        app.logger.error("图片尺寸不合格：[" + str(w) + "," + str(h) + "]" + args['img_url'])
                        final_result['code'] = 500
                        final_result['msg'] = "图片尺寸不合格，最长边应小于等于400px"
                        return final_result, 200
                    img_format = str(im.format).lower()
                    if img_format not in ['jpeg', 'png', 'bmp']:
                        app.logger.error("图片格式不正确：[" + img_format + "] " + args['img_url'])
                        final_result['code'] = 500
                        final_result['msg'] = "图片格式不正确：[" + img_format + "], 仅支持格式：jpg、png、bmp"
                        return final_result, 200
            except Exception as e:
                app.logger.error("图片校验异常：")
                app.logger.error(e)
                final_result['code'] = 500
                final_result['msg'] = '服务异常 请核实 img_url: ' + args['img_url']
                return final_result, 200

        # ---- feature extraction -----------------------------------------
        try:
            print(args['img_url'])
            if args['from_autotagging'] == 1:
                # Autotagging callers always get online-filtered results and
                # their url is passed through untouched (just quoted).
                args['is_online'] = 1
                img_url = quote(args['img_url'])
            else:
                if args['engine'] == '500px':
                    img_url = quote(args['img_url'], 'utf-8')
                else:
                    # Ask OSS for a <=400px rendition before featurizing.
                    img_url = quote(args['img_url'] + '?x-oss-process=image/resize,m_lfit,h_400,w_400', 'utf-8')

            r = requests.get(_feature_base_url + img_url, timeout=10)
            if r.status_code != 200:
                app.logger.error("feature服务器误  " + str(r.status_code) + ' ' + args['img_url'])
                final_result['code'] = 500
                final_result['msg'] = 'feature服务错误，请联系管理员'
                return final_result, 200
            # The raw response body IS the vector string.
            vector = r.text
        except Exception as e:
            app.logger.error("feature服务错误：" + _feature_base_url + args['img_url'])
            app.logger.error(e)
            final_result['code'] = 500
            final_result['msg'] = 'feature服务错误，请联系管理员'
            return final_result, 200

        # ---- parallel n2 ANN search -------------------------------------
        engine_list = args['engine'].split(',')
        n2_size = str(args['n2_size'])
        n2_results = get_n2_results_by_multithreading(engine_list, vector, n2_size)

        if len(n2_results) == 0:
            app.logger.error("n2 服务器都挂了")
            final_result['code'] = 500
            final_result['msg'] = 'n2服务异常，请联系管理员'
            return final_result, 200

        # Sort ascending by n2 score (presumably a distance: lower = more
        # similar — confirm against the n2 service).
        pr = parseResult(n2_results)
        res_tmp = sorted(pr, key=lambda x: x['score'])
        result_size = int(args['result_size'])

        if args['is_online'] == 1:
            # Over-fetch 2x before the online filter so enough survive.
            res_tmp = filterOfflineImages(res_tmp[:result_size*2])

        res = res_tmp[:result_size]
        if int(args['surf_enable']) == 0:
            # No SURF re-scoring requested: return raw n2 scores.
            final_result['data']['result'] = list(map(lambda x: {'imageId': x['id'], 'score': x['score']}, res))
            return final_result, 200

        # ---- SURF re-scoring --------------------------------------------
        res_result = resparse(res)

        surf_results = []
        try:
            postData = {'origin_url': args['img_url'], 'img_list': []}
            if len(res_result) > 0:
                for result_one in res_result:
                    if result_one['img_url'] and len(result_one['img_url']) > 10:
                        postData['img_list'].append({result_one['id']: result_one['img_url']})

            r = requests.post(_surf_uri, json=postData, timeout=10)
            surf_result = r.json()

            # app.logger.info(surf_result)

            # Candidates the SURF service did not score get surf_score 0.
            for oneres in res_result:
                if oneres['id'] in surf_result:
                    oneres['surf_score'] = surf_result[oneres['id']]
                else:
                    oneres['surf_score'] = 0
                surf_results.append(oneres)

        except Exception as e:
            # Best-effort: a SURF failure leaves surf_results empty and an
            # empty result list is returned below.
            app.logger.error('surf_api 错误：')
            app.logger.error(e)
            # ex_result = list(map(lambda x: {'from': x['from'], 'id': x['id'], 'score': x['score']}, result))
            # return ex_result, 200
            pass

        result = sorted(surf_results, key=lambda x: x['surf_score'], reverse=True)

        # vcg_editor additionally gets an is_pirate flag using the same
        # empirically tuned SURF threshold as the v1 endpoint.
        if args['engine'] == 'vcg_editor':
            final_result['data']['result'] = list(
                map(lambda x: {'imageId': x['id'], 'score': x['surf_score'], 'is_pirate': 1}
                if x['surf_score'] >= 0.095684803001876 else {'imageId': x['id'], 'score': x['surf_score'],
                                                              'is_pirate': 0}, result))
            return final_result, 200

        final_result['data']['result'] = list(map(lambda x: {'imageId': x['id'], 'score': x['surf_score']}, result))
        return final_result, 200


# v1 adds thumbnail caching, SURF scoring and the pirate classifier;
# v2 is the plain similarity search.
api.add_resource(CopyrightAnalyzer, '/v1/api/getsimilarimages')
api.add_resource(SearchImageService, '/v2/api/getsimilarimages')

if __name__ == '__main__':
    # NOTE(review): debug=True enables the werkzeug debugger/reloader on all
    # interfaces — confirm this entry point is only used for local
    # development and a WSGI server fronts production.
    app.run(host='0.0.0.0', debug=True)