import pymysql
import requests
from bs4 import BeautifulSoup


def scrawler(url, selector, flag):
    """Download *url* and extract the elements matched by a CSS *selector*.

    Args:
        url: Page to fetch.
        selector: CSS selector passed to ``BeautifulSoup.select()``.
        flag: 0 -> return the stripped text of every match;
              1 -> return every match's ``href`` attribute.

    Returns:
        A list of strings; an empty list when *flag* is neither 0 nor 1
        (the original silently returned ``None`` in that case).

    Raises:
        requests.HTTPError: if the server responds with an error status.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0'}
    # timeout prevents the script from hanging forever on a dead server.
    r = requests.get(url, headers=headers, timeout=30)
    r.raise_for_status()  # fail loudly instead of parsing an error page
    # Sniff the encoding from the body, not only the headers, so Chinese
    # pages without a charset header decode correctly.
    r.encoding = r.apparent_encoding
    bs = BeautifulSoup(r.text, 'html.parser')
    if flag == 0:
        return [item.text.strip() for item in bs.select(selector)]
    if flag == 1:
        # .get() avoids a KeyError on anchors that carry no href attribute.
        return [item.get('href', '') for item in bs.select(selector)]
    return []


class DBTool():
    """Thin wrapper around a pymysql connection for the soft_journal table."""

    def __init__(self):
        # NOTE(review): credentials are hard-coded; move to env/config.
        self.conn = pymysql.connect(host='localhost', database="journal"
                                    , user='root', password='123456'
                                    , charset='utf8')
        self.cursor = self.conn.cursor()

    def queryAll(self):
        """Return every row of soft_journal as a tuple of tuples."""
        self.cursor.execute('select * from soft_journal')
        return self.cursor.fetchall()

    def queryOne(self, title):
        """Return the first row whose title equals *title*, or None."""
        # Pass parameters as a tuple so the driver escapes them.
        self.cursor.execute('select * from soft_journal where title=%s',
                            (title,))
        return self.cursor.fetchone()

    def insert(self, title, author, abstract, pubdate):
        """Insert one article row.

        Returns:
            True on success, False on failure (the error is printed and
            the transaction is rolled back so the connection stays usable).
        """
        flag = False
        try:
            # Parameterized query: the previous str.format() version was
            # vulnerable to SQL injection and broke on any value that
            # contained a double quote.
            self.cursor.execute(
                'insert into soft_journal values (null, %s, %s, %s, %s)',
                (title, author, abstract, pubdate))
            self.conn.commit()
            flag = True
        except Exception as e:
            self.conn.rollback()  # keep the connection in a clean state
            print(e)
        return flag

    def close(self):
        """Release the cursor and the underlying connection."""
        self.cursor.close()
        self.conn.close()

if __name__ == '__main__':

    # Journal home page: collect links to the five most recent articles.
    home_url = "http://gxbwk.njournal.sdu.edu.cn/CN/1672-3961/home.shtml"
    article_urls = scrawler(home_url, "dd.biaoti > a", flag=1)[:5]

    # CSS selectors for the fields scraped from each article page,
    # in the column order expected by DBTool.insert().
    selectors = {
        'title': "#goTop > div.container.whitebg > div.abs-con > div > div > h3:nth-child(4)",
        'author': "#goTop > div.container.whitebg > div.abs-con > div > div > p:nth-child(5) > span",
        'abstract': "#collapseOne > div > p:nth-child(1)",
        'pubdate': "#divPanel > ul > li:nth-child(1) > span:nth-child(5)",
    }

    dbTool = DBTool()
    for article_url in article_urls:
        # scrawler returns a list of matched strings; collapse each field
        # into a single string (matches the original ''.join() behavior).
        fields = {name: ''.join(scrawler(article_url, css, flag=0))
                  for name, css in selectors.items()}
        dbTool.insert(fields['title'], fields['author'],
                      fields['abstract'], fields['pubdate'])
