import sys
import traceback

import undetected_chromedriver as uc
from lxml import etree
import pymysql
from dbse.mysql.db_mysql_insert import addStockNews, getDataBase
from dbse.mysql.db_entity_stock_news import StockNews
from selenium.webdriver import ChromeOptions
import time
from selenium.webdriver.chrome.service import Service

from sel.proxy.selenium_proxy_util import initSelWireChrome

'''
News — Securities Times (证券时报) 7x24 live flash feed scraper.
'''

def handlePage():
    """Placeholder page-handling hook; currently only logs that it was entered."""
    message = "begin handle page"
    print(message)




def startWork(profile):
    """Continuously scrape the egsea.com 7x24 flash-news feed and insert new items into MySQL.

    Args:
        profile: database profile identifier, passed through to getDataBase().

    Runs forever: every 15 seconds the current page source is re-parsed and any
    items beyond the previously seen count are inserted.  On any exception the
    error is logged; the browser and DB handle are always released in ``finally``.
    """
    browser = None
    db = None
    try:
        db = getDataBase(profile)
        sql = 'INSERT INTO stock_news (title, content,create_time,channel,news_time,url) VALUES (%s,%s,%s,%s,%s,%s)'
        browser = initSelWireChrome(useProxy=False, isHeadless=False,
                                    host='https://www.egsea.com/news/flash.html',
                                    setTimeOut=False, ignorePic=False)
        last_total = 0

        while True:
            # Give the live feed time to refresh before re-parsing the DOM.
            time.sleep(15)
            html = etree.fromstring(browser.page_source, etree.HTMLParser())
            # Each flash item carries its url/title/description on a share-popup div.
            urls = html.xpath('//div[contains(@class,"share-popup")]//@data-url')
            titles = html.xpath('//div[contains(@class,"share-popup")]//@data-title')
            descs = html.xpath('//div[contains(@class,"share-popup")]//@data-description')
            # Timestamps are scraped but not yet stored (news_time is inserted as
            # None below) — TODO: wire newsTimes into the insert.
            newsTimes = html.xpath('//ul//div[@class="time"]//text()')

            total = len(urls)
            add_num = total - last_total
            last_total = total

            # Insert only the increment.  Indices [0, add_num) are treated as the
            # fresh items — assumes the site prepends new entries; confirm.
            if add_num > 0:
                print("==============begin insert datas==============")
                for num in range(add_num):
                    stockNews = StockNews(title=titles[num], content=descs[num],
                                          create_time=None, channel='e网',
                                          news_time=None, url=urls[num])
                    addStockNews(db, sql, stockNews)

    except Exception as e:
        print("exception occour")
        print('========发生异常，重启任务=======')
        print(f"异常信息: {e}")
        print("详细堆栈信息:")
        traceback.print_exc()  # actually emit the stack trace the message promises
    finally:
        # quit() (not close()) is required to release the whole chromedriver
        # process; close() only closes the current window and leaks the driver.
        if browser is not None:
            browser.quit()
        if db is not None:
            db.close()


if __name__ == "__main__":
    # First CLI argument selects the DB profile (raises IndexError if missing,
    # matching the original sys.argv[1:][0] behavior).  The guard prevents the
    # scraper from starting as a side effect of importing this module.
    profile = sys.argv[1]
    startWork(profile)








