# coding=utf-8
# 后台管理用户相关
import re
from abc import ABC

import jieba
import synonyms
from tornado.log import app_log

from apps import handlers
from apps.base_handler import base_handler
from public_libs.baidu_svo_extract import SVOParser
from verify import *


# 获取三元图数据
@handlers.route(r"/plt/data")
class devList(base_handler, ABC):
  """Build triple-graph data (nodes + links) for ECharts rendering.

  Input: `keywords` (space-separated) or `article_id`; at least one required.
  Output: {"data": [{"name", "id"}, ...], "links": [{source, target, value, ...}, ...]}
  where link source/target are numeric node ids.
  """

  keywords = base_verify(_type=str, default=None)
  article_id = base_verify(_type=str, default=None)

  # Per-request scratch state. These are class-level mutables (shared across
  # instances), so post() must reset them at the start of every request.
  wordItems = {}  # node name -> numeric node id
  linkItems = {}  # "<source><target>" key -> True, dedupes links
  links = []

  async def post(self, *args, **kwargs):
    # Reset shared class-level state for this request.
    self.wordItems = {}
    self.linkItems = {}
    self.links = []
    if not self.keywords and not self.article_id:
      return self.finish(self.response(status_code=4004, message="keywords 或 article_id 至少填一个"))
    elif self.article_id:
      await self.id_mode()
    elif self.keywords:
      await self.keyword_mode()
    else:
      return self.finish(self.response(status_code=4004))

    # Replace node names in links with allocated numeric node ids.
    # Order matters: target is resolved before source, preserving the
    # original id-allocation order.
    links = []
    for link in self.links:
      link["target"] = self.get_item_id(link.get("target"))
      link["source"] = self.get_item_id(link.get("source"))
      links.append(link)

    data = [{"name": name, "id": node_id} for name, node_id in self.wordItems.items()]
    return self.finish(self.response(data={"data": data, "links": links}))

  def _make_link(self, row):
    """Build one ECharts link dict from a `triple` table row.

    Line width scales with node-name similarity.
    """
    return {"source": row.get("node1"),
            "target": row.get("node2"),
            "value": row.get("relation"),
            "symbolSize": 5,
            "lineStyle": {
              "normal": {
                # BUG FIX: the original wrote compare(node1, node2 * 10),
                # multiplying the node2 *string* by 10 (string repetition)
                # before comparing. The intent is to scale the similarity
                # score by 10 for the line width.
                "width": synonyms.compare(row.get("node1"), row.get("node2"), seg=True) * 10,
                "color": '#4b565b'
              }
            }}

  def _add_link(self, row):
    """Append a deduplicated link for a triple row; return True if added."""
    key = str(row.get("node1")) + str(row.get("node2"))
    if self.linkItems.get(key):
      return False
    self.links.append(self._make_link(row))
    self.linkItems[key] = True
    return True

  async def id_mode(self):
    """Load all triples belonging to one article (by `no`)."""
    # NOTE(review): string-formatted SQL is injectable and article_id comes
    # from the request — switch to parameterized args once query_all supports
    # them (execute_many elsewhere in this file already uses %s placeholders).
    data_list = await self.query_all(sql="SELECT * FROM triple WHERE no = '%s'" % self.article_id)
    for row in data_list:
      self._add_link(row)

  async def keyword_mode(self):
    """Expand each unique space-separated keyword into graph data."""
    app_log.warning("关键词: %s", self.keywords)
    for word in set(self.keywords.split(" ")):
      await self.load_data(keyword=word)

  def get_item_id(self, word):
    """Return the numeric id for a node name, allocating one on first use."""
    word_id = self.wordItems.get(word)
    if word_id is None:
      word_id = len(self.wordItems)
      self.wordItems[word] = word_id
    return word_id

  async def load_data(self, keyword, depth=0):
    """Load up to 10 triples whose node1 matches keyword, recursing on each
    target node up to depth 2."""
    # NOTE(review): string-formatted SQL is injectable (keyword originates
    # from the request); parameterize once query_all supports args.
    data_list = await self.query_all(
      sql="SELECT * FROM triple WHERE node1 like '{keyword}' limit 10".format(keyword=keyword))
    app_log.warning(data_list)
    if data_list and depth == 0:
      # Anchor every first-level keyword to a shared "结果" (result) root node.
      self.links.append({"source": "结果",
                         "target": keyword,
                         "value": "",
                         "symbolSize": 5})
    for row in data_list:
      # Recurse only when the link is new, matching the original `continue`
      # which skipped both the append and the recursion for duplicates.
      if self._add_link(row) and depth < 2:
        await self.load_data(row.get("node2"), depth=depth + 1)


# 提交文本内容
@handlers.route("/up/doc")
class up_doc(base_handler, ABC):
  """Fetch a document from Elasticsearch by id, extract SVO triples from its
  content, and persist them into the `triple` table."""

  doc_id = base_verify(_type=str, is_none=False)
  index = base_verify(_type=str, default="graph-doc")

  # Characters stripped from subject/object before length filtering:
  # ASCII noise plus common Chinese boilerplate words and punctuation.
  # Compiled once here instead of twice per triple in data_cleansing().
  _NOISE_RE = re.compile("[A-Za-z0-9 !@#$%^&*()=+_?><|}{.例图链接本文其中所《》示“：”、，！￥…（）—]")

  async def post(self):
    extractor = SVOParser()
    try:
      doc = self.get_doc()
    except Exception as e:
      app_log.info(e)
      return self.finish(self.response(status_code=4008))
    if not doc:
      return self.finish(self.response(status_code=4010))
    try:
      content = doc.get('content')
      svo_s = extractor.triples_main(content, self.doc_id)
      sql = "INSERT INTO `triple` (`node1`, `relation`, `node2`, `no`) VALUES (%s, %s, %s, %s);"
      await self.execute_many(sql=sql, args=self.data_cleansing(svo_s))
    except Exception as e:
      # Best-effort persistence: extraction/DB failures are logged but the
      # request still succeeds. exception() keeps the traceback (the original
      # info(e) dropped it).
      app_log.exception(e)
    return self.finish(self.response())

  def get_doc(self):
    """Return the `_source` of the ES document matching doc_id, or False
    when the search failed or produced no hits."""
    result = self.es.search(index=self.index, body={
      "size": 1,
      "query": {
        "bool": {
          "filter": [
            {"term": {"_id": self.doc_id}}
          ]
        }
      }
    })
    if '_shards' in result and result['_shards']['successful'] > 0:
      hits = result['hits']['hits']
      if hits:
        return hits[0].get("_source", {})
    return False

  def data_cleansing(self, svo_s):
    """Filter SVO triples for insertion.

    Drops triples whose noise-stripped subject or object is too short
    (<= 1 char) or too long (>= 10 chars); removes spaces from survivors.
    """
    svo_list = []
    for triple in svo_s:
      subject = self._NOISE_RE.sub("", triple[0])
      obj = self._NOISE_RE.sub("", triple[2])
      len_s = len(subject.strip())
      len_o = len(obj.strip())
      if len_s <= 1 or len_o <= 1 or len_s >= 10 or len_o >= 10:
        continue
      triple[0] = triple[0].replace(" ", "")
      triple[2] = triple[2].replace(" ", "")
      svo_list.append(triple)
    return svo_list


# 上传三元组
@handlers.route("/up/triple")
class up_triple(base_handler, ABC):
  """Bulk-insert user-submitted triples into the `triple` table."""

  # NOTE: `list` shadows the builtin, but it is the request field name the
  # verifier binds to, so it must stay.
  list = base_verify(_type=list, is_none=False)

  async def post(self):
    sql = "INSERT INTO `triple` (`node1`, `relation`, `node2`) VALUES (%s, %s, %s);"
    rows = [[item.get("source"), item.get("value"), item.get("target")] for item in self.list]
    await self.execute_many(sql=sql, args=rows)
    # Consistency fix: the other handlers in this file finish with an explicit
    # response body; the original returned nothing, leaving an empty reply.
    return self.finish(self.response())


# 请求es
@handlers.route("/es/search")
class es_search(base_handler, ABC):
  """Full-text / recommendation search against Elasticsearch with
  highlighting, optional article-type filtering and pagination."""

  keywords = base_verify(default=None)
  article_type = base_verify(default="doc")
  query_type = base_verify(default="search")  # "search" or "recommend"
  index = base_verify(_type=str, default="graph-doc")
  pre_tags = base_verify(_type=str, default="<font color='red'>")
  post_tags = base_verify(_type=str, default="</font>")
  fragment_size = base_verify(_type=int, default=200)
  page_size = base_verify(default=10)
  page = base_verify(_type=int, default=1)
  user_id = base_verify(_type=int, default=0)

  async def post(self):
    if self.keywords is None:
      return self.finish(self.response(status_code=4009, message="key words 不能为空"))
    musts = self._build_musts()
    try:
      body = self._build_body(musts)
      app_log.info(body)
      result = self.es.search(index=self.index, body=body)
      if '_shards' in result and result['_shards']['successful'] > 0:
        item = result.get("hits", {})
        hits = item.get("hits", [])
        total = item.get("total", {}).get("value", 0)
        return self.finish(self.response(data={"list": hits, "total": total}))
      # BUG FIX: the original fell through here without calling finish(),
      # leaving the client's request hanging when no shard succeeded.
      return self.finish(self.response(status_code=4009))
    except Exception as e:
      app_log.info(e)
      return self.finish(self.response(status_code=4009))

  def _build_musts(self):
    """Build the bool-query `must` clauses for the current request."""
    musts = []
    if self.query_type == 'search':
      # Exact phrase match per space-separated keyword.
      for word in self.keywords.split(" "):
        musts.append({"match_phrase": {"content": {"query": word}}})
    else:
      # Recommend mode. NOTE(review): joining a str inserts a space between
      # every character — presumably intentional for per-character Chinese
      # matching; confirm with the caller.
      musts.append({"multi_match": {"query": " ".join(self.keywords)}})
    musts.append({
      "bool": {
        "filter": [{"term": {"type": self.article_type}}] if self.article_type is not None else []
      }
    })
    return musts

  def _build_body(self, musts):
    """Assemble the ES request body: query, pagination and highlighting."""
    body = {
      "size": self.page_size,
      "from": self.page_size * (self.page - 1),
      "query": {
        "bool": {
          "must": musts
        }
      },
      "highlight": {
        "fields": {
          "content": {"number_of_fragments": 1},
          "title": {"number_of_fragments": 1}
        },
        "pre_tags": self.pre_tags,
        "post_tags": self.post_tags,
        "fragment_size": self.fragment_size
      }
    }
    if self.user_id and self.article_type == "doc":
      # Restrict doc searches to the caller's own docs or published ones.
      body["query"]["bool"]["must"].append({
        "bool": {
          "should": [
            {"term": {"user_id.keyword": self.user_id}},
            {"term": {"status.keyword": 1}}
          ]
        }
      })
    return body
