# -*- coding: utf-8 -*-
# @Time : 2021/7/6 10:31
# @Author : Mr.Bao
# @File : spider.py
# @Software : PyCharm
import sqlite3
import urllib.error
import urllib.request

import bs4


def askURL(url):
    """Fetch *url* and return the response body decoded as UTF-8.

    Returns an empty string when the request fails; the HTTP status
    code and/or failure reason is printed for diagnosis.
    """
    print("正在请求", url)
    # User-Agent header: tells the server what kind of client we are
    # and what level of content we can accept (avoids bot blocking).
    head = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0"}
    request = urllib.request.Request(url, headers=head)
    html = ""
    try:
        # Context manager ensures the socket is closed even on error.
        with urllib.request.urlopen(request) as response:
            html = response.read().decode("utf-8")
    except urllib.error.URLError as e:
        if hasattr(e, "code"):
            print(e.code)
        if hasattr(e, "reason"):
            print(e.reason)
    else:
        # Bug fix: the success message used to print even after a failure.
        print("请求成功")
    return html


def getOneItem(classUrl, item):
    """Extract one project's fields from a single bs4 project tag.

    classUrl: site root prepended to the project's relative href.
    item: a BeautifulSoup Tag for one entry in the explore list.
    Returns [imgSrc, title, targetUrl, isGiteeSupport, isGVP, desc,
    mainLanguage, starNum].
    """
    # Project icon; some items have no <img> (or no src), so fall back
    # to an empty string. Narrowed from a bare `except Exception`.
    try:
        imgSrc = item.img.attrs['src']
    except (AttributeError, KeyError):
        imgSrc = ''
    # Project name.
    title = item.h3.a["title"]
    # Absolute project URL (href in the page is site-relative).
    targetUrl = classUrl + item.h3.a["href"]
    # Whether Gitee marks this as a recommended project.
    isGiteeSupport = len(item.select('i[title="Gitee 推荐项目"]')) > 0
    # GVP badge - Gitee most-valuable open-source project.
    isGVP = len(item.select('a[title="GVP - Gitee 最有价值开源项目"]')) > 0
    # One-line project description.
    desc = item.find_all(class_=["project-desc", "mb-1"])[0].text
    # Primary implementation language shown on the card.
    mainLanguage = item.find_all(class_=["project-language", "project-item-bottom__item"])[0].text
    # Star count is carried in a data-count attribute.
    starNum = item.find_all('a',
                            class_=["project-stars-count-box", "js-project-stars-count-box", "star-container", "d-flex",
                                    "d-flex-center"])[0].div.attrs['data-count']
    print("getOneItem", "获取到项目", title, "成功", end='')
    return [imgSrc, title, targetUrl, isGiteeSupport, isGVP, desc, mainLanguage, starNum]


def getList(targetUrl, targetName):  # Scrape every project in one category.
    """Crawl all pages of the category at *targetUrl*.

    Returns a list of per-project field lists (see getOneItem).
    NOTE(review): builds each project's absolute URL from the
    module-level ``baseurl`` global, not from *targetUrl* — confirm
    both always share the same host.
    """
    answerList = []
    print("开始获取", targetName.replace("\n", " "), "分类下的所有项目")
    soup = bs4.BeautifulSoup(askURL(targetUrl), "html.parser")
    # The second-to-last pager link holds the total page count;
    # fall back to a single page when the pager is absent.
    try:
        pageNum = int(soup.select('div[id="git-discover-page"] a[class=item]')[-2].text)
    except (IndexError, ValueError):
        print("error 找不到总数 36")
        pageNum = 1
    # Append the page query parameter with the correct separator.
    sep = "&" if "?" in targetUrl else "?"
    url = targetUrl + sep + "page="
    for i in range(1, pageNum + 1):
        print("正在获取>", targetName.replace("\n", " "), "<分类下项目", "开始第%d页" % i, "共%d页" % pageNum)
        soupOne = bs4.BeautifulSoup(askURL(url + str(i)), "html.parser")
        # Project cards on the current page (avoid shadowing builtin `list`).
        for item in soupOne.select('.explore-repo__list div[class=item]'):
            answerList.append(getOneItem(baseurl, item))
    return answerList


def getData(baseurl):  # Crawl the full recommended-project listing.
    """Return a 2-D list of project details for every recommended project.

    Each inner list is:
    [imgSrc, title, targetUrl, isGiteeSupport, isGVP, desc, mainLanguage, starNum]
    """
    # "/explore/all" is the page listing every recommended project.
    return getList(baseurl + "/explore/all", "全部推荐项目")


def insertSql(conn, item):  # Persist a single project record.
    """Insert one project row into the ``projectList`` table.

    conn: an open sqlite3 connection.
    item: [imgSrc, title, targetUrl, isGiteeSupport, isGVP, desc,
           mainLanguage, starNum] — note the stored column order is
    (targetUrl, title, imgSrc, isGiteeSupport, isGVP, desc,
     mainLanguage, starNum).
    """
    print("开始插入元素")
    imgSrc, title, targetUrl, isGiteeSupport, isGVP, desc, mainLanguage, starNum = item
    print("开始插入元素", targetUrl)
    # Parameterized statement: immune to SQL injection and no longer
    # needs the lossy `"` -> space replacement the old concatenation used.
    sql = "insert into projectList values (?, ?, ?, ?, ?, ?, ?, ?)"
    params = (str(targetUrl), str(title), str(imgSrc), str(isGiteeSupport),
              str(isGVP), str(desc), str(mainLanguage), starNum)
    print(sql)
    c = conn.cursor()  # Acquire a cursor for this statement.
    print("执行sql:", end="")
    c.execute(sql, params)  # Values are bound safely by the driver.
    conn.commit()  # Make the insert durable.
    print("插入成功", targetUrl)


def saveDataBySql(projects):
    """Persist every project row into ./database/giteeSpider.db.

    projects: list of item lists in the layout insertSql expects.
    """
    print("开始保存数据")
    conn = sqlite3.connect("./database/giteeSpider.db")
    print("成功打开数据库")
    try:
        total = len(projects)
        # enumerate instead of range(len(...)) — same order, same index.
        for index, item in enumerate(projects):
            print("共有", total, "已执行", index, end="")
            insertSql(conn, item)
    finally:
        conn.close()  # Release the connection even if an insert raises.
    print("保存完成--")


if __name__ == '__main__':
    # NOTE: baseurl must stay at module scope — getList() reads it as a global.
    baseurl = "https://gitee.com"
    # Step 1: crawl every recommended project into a 2-D list.
    projects = getData(baseurl)
    # Step 2: persist the results to the SQLite database.
    saveDataBySql(projects)
