import datetime
import logging
import os
import sys
import time

import requests
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException

# NOTE(review): project import moved ahead of the browser launch — in the
# original order an ImportError here would leave an orphaned Chrome process
# already running.
from config.dir_config import LOG_DIR

# Shared Selenium driver, used by isElementPresent() and getDetail().
browser = webdriver.Chrome()
browser.maximize_window()

# Module-wide logger at DEBUG level, mirrored to a per-run timestamped log
# file and to stderr.
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.DEBUG)
# One log file per run, named after the start timestamp (strftime already
# returns str, so no extra str() wrapper is needed).
handler = logging.FileHandler(
    filename=os.path.join(LOG_DIR, datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".log"),
    mode="w",
    encoding="utf-8")
handler.setLevel(logging.DEBUG)
handler2 = logging.StreamHandler()
handler2.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
handler2.setFormatter(formatter)
logger.addHandler(handler)
logger.addHandler(handler2)
# Obfuscated JavaScript captured from the download site, kept as a raw string.
# It shuffles the `phabdba` string table, defines the `phabdbb` index decoder,
# and after 0x3e8 ms builds a fixed-position modal <div> over the page.
# Currently dead code: the browser.execute_async_script(script) calls in
# getDetail() are commented out.
# NOTE(review): `console.log(hg)` inside the setTimeout references an
# undefined `hg` and would throw if this were ever re-enabled — confirm
# before use.  The string is runtime data; keep it byte-for-byte.
script = (r'''
    var phabdba = [
  "charAt",
  "floor",
  "random",
  "length",
  "createElement",
  "ATGmh",
  "getElementsByTagName",
  "body",
  "appendChild",
  "ZcRqf",
  "4|5|9|3|1|2|8|6|7|0",
  "position:\x20fixed;z-index:\x201;padding-top:\x20180px;left:\x200;top:\x200;width:\x20100%;height:\x20100%;overflow:\x20auto;background-color:\x20rgb(0,0,0);background-color:\x20rgba(0,0,0,0.4);",
  "idModal",
  "div",
  "block",
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
  "split",
  "style",
  "cssText",
  "HTwTI",
  "setAttribute",
  "tcAde",
  "JsEaq"
];
(function(c, d) {
  var e = function(f) {
    while (--f) {
      c["push"](c["shift"]());
    }
  };
  var g = function() {
    var h = {
      data: { key: "cookie", value: "timeout" },
      setCookie: function(i, j, k, l) {
        l = l || {};
        var m = j + "=" + k;
        var n = 0x0;
        for (var n = 0x0, p = i["length"]; n < p; n++) {
          var q = i[n];
          m += ";\x20" + q;
          var r = i[q];
          i["push"](r);
          p = i["length"];
          if (r !== !![]) {
            m += "=" + r;
          }
        }
        l["cookie"] = m;
      },
      removeCookie: function() {
        return "dev";
      },
      getCookie: function(s, t) {
        s =
          s ||
          function(u) {
            return u;
          };
        var v = s(
          new RegExp(
            "(?:^|;\x20)" +
              t["replace"](/([.$?*|{}()[]\/+^])/g, "$1") +
              "=([^;]*)"
          )
        );
        var w = function(x, y) {
          x(++y);
        };
        w(e, d);
        return v ? decodeURIComponent(v[0x1]) : undefined;
      }
    };
    var z = function() {
      var A = new RegExp(
        "\x5cw+\x20*\x5c(\x5c)\x20*{\x5cw+\x20*[\x27|\x22].+[\x27|\x22];?\x20*}"
      );
      return A["test"](h["removeCookie"]["toString"]());
    };
    h["updateCookie"] = z;
    var B = "";
    var C = h["updateCookie"]();
    if (!C) {
      h["setCookie"](["*"], "counter", 0x1);
    } else if (C) {
      B = h["getCookie"](null, "counter");
    } else {
      h["removeCookie"]();
    }
  };
  g();
})(phabdba, 0xab);
var phabdbb = function(c, d) {
  c = c - 0x0;
  var e = phabdba[c];
  return e;
};
var c = (function() {
  var c = !![];
  return function(d, e) {
    var f = c
      ? function() {
          if (e) {
            var g = e["apply"](d, arguments);
            e = null;
            return g;
          }
        }
      : function() {};
    c = ![];
    return f;
  };
})();
var n = c(this, function() {
  var c = function() {
      return "\x64\x65\x76";
    },
    d = function() {
      return "\x77\x69\x6e\x64\x6f\x77";
    };
  var e = function() {
    var f = new RegExp(
      "\x5c\x77\x2b\x20\x2a\x5c\x28\x5c\x29\x20\x2a\x7b\x5c\x77\x2b\x20\x2a\x5b\x27\x7c\x22\x5d\x2e\x2b\x5b\x27\x7c\x22\x5d\x3b\x3f\x20\x2a\x7d"
    );
    return !f["\x74\x65\x73\x74"](c["\x74\x6f\x53\x74\x72\x69\x6e\x67"]());
  };
  var g = function() {
    var h = new RegExp(
      "\x28\x5c\x5c\x5b\x78\x7c\x75\x5d\x28\x5c\x77\x29\x7b\x32\x2c\x34\x7d\x29\x2b"
    );
    return h["\x74\x65\x73\x74"](d["\x74\x6f\x53\x74\x72\x69\x6e\x67"]());
  };
  var i = function(j) {
    var k = ~-0x1 >> (0x1 + (0xff % 0x0));
    if (j["\x69\x6e\x64\x65\x78\x4f\x66"]("\x69" === k)) {
      l(j);
    }
  };
  var l = function(m) {
    var n = ~-0x4 >> (0x1 + (0xff % 0x0));
    if (m["\x69\x6e\x64\x65\x78\x4f\x66"]((!![] + "")[0x3]) !== n) {
      i(m);
    }
  };
  if (!e()) {
    if (!g()) {
      i("\x69\x6e\x64\u0435\x78\x4f\x66");
    } else {
      i("\x69\x6e\x64\x65\x78\x4f\x66");
    }
  } else {
    i("\x69\x6e\x64\u0435\x78\x4f\x66");
  }
});
n();
if (window["qc"] === undefined) {
  setTimeout(function() {
    var d = {
      HiWHk: phabdbb("0x0"),
      HTwTI: phabdbb("0x1"),
      tcAde: phabdbb("0x2"),
      JsEaq: function(e, f) {
        return e < f;
      },
      Wioxm: phabdbb("0x3"),
      ATGmh: phabdbb("0x4"),
      ZcRqf: phabdbb("0x5")
    };
    var g = d["HiWHk"][phabdbb("0x6")]("|"),
      h = 0x0;
    while (!![]) {
    console.log(hg);
     console.log(g[h++]);
      switch (g[h++]) {
        case "0":
          k[phabdbb("0x7")][phabdbb("0x8")] = d[phabdbb("0x9")];
          continue;
        case "1":
          k[phabdbb("0xa")]("id", l);
          continue;
        case "2":
          var i = document["getElementById"](d[phabdbb("0xb")]);
          continue;
        case "3":
          for (var j = 0x0; d[phabdbb("0xc")](j, 0x3); j++) {
            l += m[phabdbb("0xd")](
              Math[phabdbb("0xe")](Math[phabdbb("0xf")]() * m[phabdbb("0x10")])
            );
          }
          continue;
        case "4":
          var k = document[phabdbb("0x11")](d["Wioxm"]);
          continue;
        case "5":
          var l = "";
          continue;
        case "6":
          i[phabdbb("0x7")]["display"] = d[phabdbb("0x12")];
          continue;
        case "7":
          document[phabdbb("0x13")](phabdbb("0x14"))[0x0][phabdbb("0x15")](k);
          continue;
        case "8":
          k[phabdbb("0x15")](i);
          continue;
        case "9":
          var m = d[phabdbb("0x16")];
          continue;
      }
      break;
    }
  }, 0x3e8);
}




  ''')


# Small helper that reports whether an element exists on the current page.
def isElementPresent(by, value, driver=None):
    """Return True if an element matching (by, value) is on the page.

    Parameters
    ----------
    by : str
        Locator strategy accepted by ``find_element`` (e.g. "xpath", "id").
    value : str
        Locator expression.
    driver : optional
        WebDriver to query.  Defaults to the module-level ``browser``;
        the parameter is a backward-compatible addition for testability.

    Notes
    -----
    Fixes two lint issues in the original: the looked-up element and the
    caught exception were both bound to names that were never used.
    """
    driver = browser if driver is None else driver
    try:
        driver.find_element(by=by, value=value)
    except NoSuchElementException:
        # The locator matched nothing on the page.
        return False
    # No exception was raised, so the element exists.
    return True


# Mid-file import deliberately left in place: importing dao.BaseRepository may
# have side effects (e.g. opening a DB connection), so reordering it could
# change startup behavior.  TODO confirm and move to the top of the file.
from dao.BaseRepository import BaseRepository

# PAGE_NUM = 1

# Shared repository handle for the "igggames" collection; used both for the
# duplicate checks and the inserts in crapy_AliGames()/getDetail().
baseRep = BaseRepository(collectionname="igggames")


# async def crapy_AliGames(PAGE_NUM):
def crapy_AliGames(PAGE_NUM):
    """Scrape one listing page of igg-games.com and crawl each new game.

    Parameters
    ----------
    PAGE_NUM : int
        1-based listing page number to fetch.

    For every game link on the page, the repository is checked first and only
    games not yet stored are passed to getDetail().  The whole process exits
    once the duplicate counter reaches 5 — note the increment is commented
    out below (as in the original), so that guard currently never fires.
    """
    chongfu = 0  # duplicate counter ("重复" = duplicate); stays 0, see note below
    start_time = time.time()
    listing_url = "https://igg-games.com/page/" + str(PAGE_NUM)
    response = requests.get(listing_url)
    soup = BeautifulSoup(response.content, 'html5lib')
    for item in soup.select("a.uk-link-reset"):
        # chongfu is always an int here; the original's extra
        # "is not None" test was redundant and has been dropped.
        if chongfu >= 5:
            logger.info("chongfudayu10")
            sys.exit()
        link = item.get("href", None)
        if link is None:
            continue
        game_name = str(item.string).strip().replace(" Download", "")
        _, num, msg = baseRep.search_no_page({"name": game_name})
        if num == 0:
            # Lazy %-style args: formatting happens only if the record is emitted.
            logger.info("%s不存在,开始爬取!", game_name)
            try:
                getDetail(link, game_name)
            except Exception as e:
                # Best-effort crawl: log the failure and move on to the next game.
                logger.error(str(e))
                continue
        else:
            logger.error("%s已存在,停止爬取!", game_name)
            # chongfu = chongfu + 1  # re-enable to make the exit guard live
    logger.info("第%s页,耗时%s秒", PAGE_NUM, time.time() - start_time)


# async def getDetail(url, game_name):
def getDetail(url, game_name):
    """Crawl one game's detail page and store its metadata and magnet link.

    Parameters
    ----------
    url : str
        Detail-page URL on igg-games.com.
    game_name : str
        Cleaned game title, used as the unique key in the repository.

    Side effects: drives the module-level Selenium ``browser`` and may insert
    one document into ``baseRep``.  Network/Selenium failures propagate to
    the caller, which logs them and skips the game.
    """
    logger.info("爬取链接:" + str(url))
    r = requests.get(url, timeout=60)
    # BUGFIX: the original set r.encoding = "gbk2312", which is not a real
    # codec name ("gbk"/"gb2312" mixed together); requests silently fell back
    # to a plain UTF-8 decode.  Declare UTF-8 explicitly — same effective
    # decode, without the invalid-codec landmine.
    r.encoding = "utf-8"
    soup = BeautifulSoup(r.text, 'html5lib')

    # Each selector is queried once and reused (the original ran every
    # select_one twice: once for the None-check, once for the value).
    meta_sel = '.uk-article > p.uk-margin-top.uk-margin-remove-bottom.uk-article-meta.ogiua'
    type_node = soup.select_one(meta_sel + ' > a:nth-child(3)')
    game_type = type_node.string if type_node is not None else "未知类型"
    date_node = soup.select_one(meta_sel + ' > time')
    game_publish_date = date_node.string if date_node is not None else "未知日期"
    intro_node = soup.select_one('.uk-article > div > p:nth-child(2)')
    game_introduce = intro_node.string if intro_node is not None else "未知内容"

    # Last matching torrent-page link wins, matching the original loop.
    downURL = None
    for down in soup.find_all("a"):
        href = str(down.get("href", ""))
        if href.startswith("https://pcgamestorrents.com") and href.endswith("html"):
            downURL = down.get("href", None)
    if not downURL:
        return

    r = requests.get(downURL)
    soup2 = BeautifulSoup(r.content, 'html5lib')
    card_sel = ".uk-article > div > p.uk-card.uk-card-body.uk-card-default.uk-card-hover > a"
    card = soup2.select_one(card_sel)
    url = card.get("href", "") if card is not None else None
    if not url:
        return

    browser.get(url)
    _wait_for_download_fields(url)
    kqc = browser.find_element(by="xpath", value='//*[@id="kqc"]').get_attribute('value')
    kqp = browser.find_element(by="xpath", value='//*[@id="kqp"]').get_attribute('value')
    print(kqc)
    print(kqp)

    # Exchange the two hidden form values for the final magnet link.
    r = requests.post(url='http://dl.pcgamestorrents.com/geturl.php', data={"kqc": kqc, "kqp": kqp})
    soup4 = BeautifulSoup(r.text, "html5lib")
    magenet = soup4.select_one("input[type=text]")["value"]
    print(magenet)
    game = {
        "name": game_name,
        "type": game_type,
        "publish_date": game_publish_date,
        "introduce": game_introduce,
        "down_url": url,
        # Key spelling "magenet" (sic) kept as-is: it is the stored DB schema.
        "magenet": magenet
    }

    logger.info(str(game))
    _, num, msg = baseRep.search_no_page({"name": game_name})
    if num == 0:
        baseRep.insert_one(game)
        print("已保存:" + str(game))


def _wait_for_download_fields(url):
    """Poll until #nut exists and #kqc carries a non-empty value.

    Refreshes and reloads the page every 20 attempts, as before.
    BUGFIX: the original used the Selenium-3-only ``find_element_by_xpath``
    (removed in Selenium 4) and compared its result to None — but
    ``find_element`` never returns None, it raises, so a not-yet-rendered
    element aborted the crawl instead of waiting.  Existence is now checked
    via isElementPresent() so the loop actually waits.
    """
    n = 0
    while True:
        n += 1
        if isElementPresent("xpath", '//*[@id="nut"]') and \
                isElementPresent("xpath", '//*[@id="kqc"]'):
            kqc_value = browser.find_element(by="xpath", value='//*[@id="kqc"]').get_attribute('value')
            if len(str(kqc_value).strip()) > 0:
                break
        time.sleep(1)
        print("等待元素出现!")
        if n >= 20:
            n = 0
            print("刷新")
            browser.refresh()
            browser.get(url)


# browser = webdriver.Chrome()  # declare the browser (superseded by the instance created at import time)
# browser.get(url)

if __name__ == "__main__":
    for x in range(1, 699, 1):
        if x > 0:
            logger.info("爬取第" + str(x) + "页")
            # asyncio.run(crapy_AliGames(x))
            crapy_AliGames(x)
