import pymysql
import requests
from bs4 import BeautifulSoup

# Connect to the MySQL database (schema "journal").
conn = pymysql.connect(host='localhost', user='root', password='root', db='journal', charset='utf8')
cursor = conn.cursor()

# Create the paper table if it does not already exist.  The original code
# executed an empty query here, which MySQL rejects ("Query was empty").
# Columns match the INSERT (title, author, paperabstract) and the 4-column
# SELECT printout used further down in this script.
create_table_sql = '''
CREATE TABLE IF NOT EXISTS paper (
    id INT AUTO_INCREMENT PRIMARY KEY,
    title VARCHAR(512),
    author VARCHAR(255),
    paperabstract TEXT
)
'''
cursor.execute(create_table_sql)
conn.commit()

def _meta_content(soup, name, fallback):
    """Return the 'content' attribute of the <meta name=...> tag, or *fallback* if absent."""
    tag = soup.find('meta', attrs={'name': name})
    return tag['content'] if tag else fallback


# Crawl one paper page and extract its metadata.
def crawl_paper_info(url):
    """Fetch *url* and return a (title, author, abstract) tuple.

    Returns (None, None, None) on any network failure or non-200 status
    so the caller can skip the record.  Individually missing meta tags
    fall back to the placeholder strings.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'
    }
    try:
        # timeout keeps the script from hanging forever on a dead host;
        # RequestException covers DNS, connection, and timeout errors.
        response = requests.get(url, headers=headers, timeout=15)
    except requests.RequestException as exc:
        print("请求失败：", exc)
        return None, None, None

    if response.status_code != 200:
        print("请求失败，状态码：", response.status_code)
        return None, None, None

    soup = BeautifulSoup(response.text, 'html.parser')
    title = _meta_content(soup, 'dc.title', "未找到标题")
    author = _meta_content(soup, 'dc.creator', "未找到作者")
    abstract = _meta_content(soup, 'description', "未找到摘要")
    return title, author, abstract

# Persist one crawled paper record into the paper table.
def insert_paper_to_db(title, author, abstract):
    """Insert a single paper row; skip silently-failed crawls."""
    if not (title and abstract):
        # A falsy title or abstract means the crawl produced no usable data.
        print("无效的数据，跳过插入。")
        return
    sql = "INSERT INTO paper (title, author, paperabstract) VALUES (%s, %s, %s)"
    cursor.execute(sql, (title, author, abstract))
    conn.commit()
    print(f"已插入论文: {title}")

# Paper pages to crawl.
urls = [
    'https://www.hanspub.org/journal/paperinformation?paperid=102228',
    'https://www.hanspub.org/journal/paperinformation?paperid=102034',
    'https://www.hanspub.org/journal/paperinformation?paperid=91118',
]

# Crawl each page and store whatever metadata was extracted.
for url in urls:
    insert_paper_to_db(*crawl_paper_info(url))

# Read every stored row back to confirm the inserts succeeded.
cursor.execute("SELECT * FROM paper")
for row in cursor.fetchall():
    print(f"ID: {row[0]}, Title: {row[1]}, Author: {row[2]}, Abstract: {row[3]}")

# Release the database resources.
cursor.close()
conn.close()
