import yaml,pymysql,requests,_thread,queue,threading,time,cchardet,json
import lxml

from concurrent.futures import ThreadPoolExecutor  # 线程池模块

# Path to the YAML file holding MySQL connection settings
# (keys: host, port, user, passwd, database — read in connect_msyql()).
yamlPath = 'mysql_conninfo.yaml'

# Load the connection settings once at import time.
# safe_load is preferred over load(..., FullLoader) for plain config files:
# it cannot construct arbitrary Python objects from the document.
with open(yamlPath, encoding='utf-8') as f:
    yaml_data = yaml.safe_load(f)


# Global shutdown flag polled by the worker threads; the main script sets
# it to 1 once the work queue has drained.
exitFlag = 0

class myThread(threading.Thread):
    """Worker thread that claims items from a shared queue and processes them.

    threadID -- numeric id, used only for logging
    name     -- the URL this worker will download (doubles as the thread name)
    q        -- the shared work queue (same object as the global workQueue)
    """

    def __init__(self, threadID, name, q):
        super().__init__()
        self.q = q
        self.threadID = threadID
        self.name = name

    def run(self):
        # Delegate the actual polling loop to the module-level worker function.
        print("开启线程：" + self.name)
        process_data(self.threadID, self.name, self.q)
        print("退出线程：" + self.name)

def process_data(threadID, threadName, q):
    """Worker loop: claim an item from the queue, fetch `threadName` as a URL,
    and persist the page HTML into MySQL.  Returns when the global exitFlag
    is set.

    threadID   -- numeric id, used only for logging
    threadName -- the URL this worker downloads (one fetch per claimed item)
    q          -- the shared work queue (same object as the global workQueue)
    """
    while not exitFlag:
        data = None
        # Hold the lock only around the empty-check + get.  The original code
        # kept the lock held through the HTTP request and the DB insert, which
        # serialized every worker, and never released it if either raised.
        with queueLock:
            if not workQueue.empty():
                data = q.get()
        if data is not None:
            res = requests.get(threadName)
            # Detect the page encoding from the raw bytes; fall back to utf-8
            # when detection fails (cchardet can return None).
            encoding = cchardet.detect(res.content)['encoding'] or 'utf-8'
            body = res.content.decode(encoding, errors='replace')
            url = threadName
            # HACK: string-built SQL.  escape_string guards the quoting, but a
            # parameterized query would be safer for untrusted page content.
            sql_str = "INSERT INTO tb_url(`url`,`title`,`html`) VALUE ('%s','','%s'); " % (url, pymysql.escape_string(body))
            insert_or_updata_or_delete_data(sql_str)
            print("url: %s name: %s processing %s" % (threadID, threadName, data))
        time.sleep(1)

# Get the next page of the current page — not needed for now: the JSON data
# request URL has already been found.
# def get_current_page_next_page(url):



def connect_msyql():
    """Open a new MySQL connection using the settings from mysql_conninfo.yaml.

    Returns (connection, cursor); the caller is responsible for closing both
    (see close_mysql).
    """
    # Use keyword arguments: pymysql 1.0+ rejects positional connect() args,
    # and the old positional order did not reliably map `port` to the port
    # parameter.  int() tolerates the port being stored as a string.
    conn = pymysql.connect(
        host=yaml_data['host'],
        user=yaml_data['user'],
        password=yaml_data['passwd'],
        database=yaml_data['database'],
        port=int(yaml_data['port']),
    )
    cursor = conn.cursor()
    return conn, cursor

# Close the MySQL connection
def close_mysql(cursor, conn):
    """Release a cursor and then its underlying connection, in that order."""
    for handle in (cursor, conn):
        handle.close()

# Insert, update, or delete one or more rows
def insert_or_updata_or_delete_data(sql_cmd):
    """Execute a single INSERT/UPDATE/DELETE statement and commit it.

    sql_cmd -- the complete SQL statement to run
    Returns the affected-row count from cursor.execute().
    """
    conn, cursor = connect_msyql()
    try:
        result = cursor.execute(sql_cmd)
        conn.commit()
    finally:
        # Always release the connection, even when execute/commit raises —
        # the original leaked a connection per failed statement.
        close_mysql(cursor, conn)
    return result

# Section front pages, kept for reference; the workers below actually use
# url_page_list (the per-page JSON/JSONP API endpoints).
url_list = ['https://news.sina.com.cn/china/','https://news.sina.com.cn/world/','https://mil.news.sina.com.cn/','https://mil.news.sina.com.cn/dgby/']

url_page_list = []

# Domestic news: 125 pages of the roll API, e.g.
# https://feed.sina.com.cn/api/roll/get?pageid=121&lid=1356&num=20&versionNumber=1.2.4&page=4&encode=utf-8&callback=feedCardJsonpCallback&_=1625059957491
# International news: 37 pages, e.g.
# https://interface.sina.cn/news/get_news_by_channel_new_v2018.d.html?cat_1=51923&show_num=27&level=1,2&page=4&callback=newsloadercallback&_=1625064534909
for i in range(1, 126):
    new_url = "https://feed.sina.com.cn/api/roll/get?pageid=121&lid=1356&num=20&versionNumber=1.2.4&page=%d&encode=utf-8&callback=feedCardJsonpCallback&_=%d" % (i, int(time.time() * 1000))
    url_page_list.append(new_url)
for j in range(1, 38):
    # BUG FIX: the original interpolated the stale loop variable `i` (always
    # 125 after the first loop), so all 37 entries requested the same page.
    new1_url = "https://interface.sina.cn/news/get_news_by_channel_new_v2018.d.html?cat_1=51923&show_num=27&level=1,2&page=%d&callback=newsloadercallback&_=%d" % (j, int(time.time() * 1000))
    url_page_list.append(new1_url)


# Work items placed on the shared queue; each claimed item triggers one
# fetch of the claiming thread's URL.
nameList = ["One", "Two", "Three", "Four"]
queueLock = threading.Lock()
workQueue = queue.Queue(10)
threads = []
threadID = 1

# Spawn one worker thread per API page URL.
for tName in url_page_list:
    thread = myThread(threadID, tName, workQueue)
    thread.start()
    threads.append(thread)
    threadID += 1

# Fill the queue under the same lock the workers use for their
# empty-check + get, so they never see a half-filled queue.
with queueLock:
    for word in nameList:
        workQueue.put(word)

# Wait for the queue to drain.  Sleep between polls instead of the original
# `while not workQueue.empty(): pass`, which pinned a CPU core busy-spinning.
while not workQueue.empty():
    time.sleep(0.1)

# Signal the workers that it is time to exit.
exitFlag = 1

# Wait for all worker threads to finish.
for t in threads:
    t.join()