'''
Crawl the headline news from the NetEase front page (https://www.163.com),
render each article page with a headless browser, extract the article body
(`div.post_main`) as Markdown via html2text, and index articles that are not
already present into an Elasticsearch "news" index.
'''
import asyncio
from asyncio.tasks import sleep
from asyncio.windows_events import NULL
from requests_html import HTMLSession
import html2text
from elasticsearch_dsl import (
    Document,
    SearchAsYouType,
    analyzer,
    connections,
    token_filter,
    Search
)

from elasticsearch_dsl.query import MultiMatch

# Establish the default Elasticsearch connection (a list of hosts may be
# supplied to connect to multiple servers).


class News(Document):
    """Elasticsearch document mapping for a news item.

    Only ``name`` is declared here with a search-as-you-type mapping;
    the script below indexes dicts with ``title``/``href``/``content``
    keys, which rely on dynamic mapping — NOTE(review): verify the
    declared field actually matches what is indexed.
    """

    # Prefix/as-you-type search support with shingles up to size 3.
    name = SearchAsYouType(max_shingle_size=3)

    class Index:
        # Target index name and minimal single-node settings.
        name = "news"
        settings = {"number_of_shards": 1, "number_of_replicas": 0}

# Initiate the default connection to the (remote) Elasticsearch server;
# all Document/Search operations below use this registered connection.
es = connections.create_connection(hosts=["http://hyrtc.net:9200"])
# Create the "news" index with the News mapping if it does not exist yet.
News.init()

def crawlNeteaseNews(tbl):
    """Fetch every article in *tbl* and attach its body text in place.

    Each entry of *tbl* is a dict with (at least) an ``'href'`` key.  The
    article page is downloaded with the module-level ``session``, rendered
    in a headless browser, and the inner HTML of its ``div.post_main``
    container is converted to Markdown and stored under the entry's
    ``'content'`` key.  Entries whose rendered page has no such container
    are left unchanged.

    :param tbl: list of dicts, mutated in place (``'content'`` added).
    """
    async def _capture(page, item):
        # Query the rendered DOM for the article body container.
        node = await page.querySelector('div.post_main ')
        if node is not None:
            prop = await node.getProperty("innerHTML")
            raw_html = await prop.jsonValue()
            item['content'] = html2text.html2text(raw_html)

    for item in tbl:
        print(item['href'])
        response = session.request("GET", item['href'])
        response.encoding = 'utf-8'
        # keep_page=True keeps the Chromium page open so _capture can query it.
        response.html.render(keep_page=True, timeout=1000, sleep=0)
        # The pyppeteer page is bound to the current event loop, so reuse it
        # rather than creating a fresh loop with asyncio.run().
        asyncio.get_event_loop().run_until_complete(_capture(response.html.page, item))



### program start
#
# 1. Render the NetEase front page and scrape the headline list.
# 2. Fetch each article's body via crawlNeteaseNews().
# 3. Index articles whose title is not already in the "news" index.

session = HTMLSession()
url = "https://www.163.com"

response = session.request("GET", url)
response.encoding = 'utf-8'
# Collected headline entries: {"title": ..., "href": ...}
alls = []

try:
    # keep_page=True keeps the rendered Chromium page open for querying.
    response.html.render(keep_page=True, timeout=0, sleep=0)

    async def main():
        """Scrape the headline area: 3 <ul> groups of up to 5 links each."""
        # Give the page a brief moment for dynamically inserted content.
        await response.html.page.waitFor(100)
        # Selector template for the j-th <ul> group of headline links.
        newsimportant = '#js_index2017_wrap > div:nth-child(2) > div.ne_area.ne_index_area > div.cm_area.ns_area_first > div.col_lm > div.col_c.overflowh > div > div > div > div.tab_panel.tab_panel_yaowen.current > div.yaowen_news > div.news_default_yw.news_yw_show > ul:nth-child(%d)'
        for j in range(3):
            group = await response.html.page.querySelector(newsimportant % (j + 1))
            for k in range(5):
                # Pick the k-th headline anchor, in page order.
                link = await group.querySelector("li:nth-child(%d) > a" % (k + 1))
                if link is None:
                    break  # fewer than 5 items in this group
                prop = await link.getProperty("innerHTML")
                titlename = await prop.jsonValue()
                prop = await link.getProperty("href")
                href = await prop.jsonValue()
                alls.append({"title": html2text.html2text(titlename), "href": href})

    asyncio.get_event_loop().run_until_complete(main())
except Exception as e:
    # Best-effort scraping: report the failure and continue with whatever
    # headlines were collected so far.
    print(e)

crawlNeteaseNews(alls)

for item in alls:
    print(item['title'])
    # Deduplicate by exact title match before indexing.
    # NOTE(review): this queries a dynamically-mapped "title.keyword" field,
    # not the "name" field declared on the News document — verify mapping.
    s = Search(index='news').query("term", title__keyword=item['title'])
    result = s.execute()
    if not result.hits:
        es.index(index="news", op_type='create', body=item)
