#!/usr/bin/python
# coding:utf-8
from urllib import request
from bs4 import BeautifulSoup
import os
import time

"""
关于我转生后成为史莱姆的那件事
https://www.mkzhan.com/209657/
"""
# HTTP headers sent with every request; a real browser User-Agent is used
# so the site does not reject the crawler outright.
head = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
    "Connection": "keep-alive"
}


def GetChapterURL(url):
    """
    Yield (chapter name, chapter URL) pairs scraped from a comic's
    table-of-contents page.

    :param url: (str) URL of the comic's table-of-contents page
    :return: generator of (str chapter name, str absolute chapter URL)
    """
    req = request.Request(url=url, headers=head)
    html = request.urlopen(req).read().decode("utf8")
    soup = BeautifulSoup(html, 'lxml')
    for link in soup.find_all("a", class_="j-chapter-link"):
        # <a>.string is None when the tag has nested children.  The original
        # fallback split str(link) on spaces and re-glued two fragments,
        # which breaks on any markup change; get_text() collects all
        # descendant text reliably.
        title = link.string
        if title is None:
            title = link.get_text()
        # Strip spaces/newlines so the title is safe to use as a directory name.
        title = title.replace(" ", "").replace("\n", "")
        yield title, "https://www.mkzhan.com" + link.get("data-hreflink")


def GetImgURL(url):
    """
    Yield the URL of every comic-page image found on a chapter page.

    :param url: (str) chapter page URL
    :return: generator of str image URLs (the lazy-load "data-src" values)
    """
    page = request.urlopen(request.Request(url, headers=head)).read()
    document = BeautifulSoup(page, 'lxml')
    for img_tag in document.find_all("img", class_="lazy-read"):
        yield img_tag.get("data-src")


def SaveImg(url, path=None):
    """
    Download the image at *url* and save it as ``<path>.jpg``, creating
    parent directories as needed.

    :param url: (str) image URL
    :param path: (str) destination path WITHOUT the ".jpg" extension;
        the default of None is kept for backward compatibility but a
        real path is required (None raises TypeError, as before)
    :return: None (side effect: writes the file)
    """
    # os.path.dirname replaces the original hand-rolled path[:path.rfind("/")],
    # which silently chopped the last character when the path had no "/".
    parent = os.path.dirname(path)
    if parent:
        # exist_ok avoids the race between an exists() check and makedirs().
        os.makedirs(parent, exist_ok=True)

    data = request.urlopen(url).read()
    with open(path + ".jpg", 'wb') as jpg:
        jpg.write(data)


def ComicsFile(path, url):
    """
    Crawl a comic's table of contents and save every page image under
    ``path/<chapter name>/<page number>.jpg``.

    :param path: (str) root directory the images are saved beneath
    :param url: (str) URL of the comic's table-of-contents page
    :return: None (side effect: writes image files)
    """
    for name, chapter_url in GetChapterURL(url):
        print(name, end="\n")
        # Pages within a chapter are numbered starting at 1.
        for page, img_url in enumerate(GetImgURL(chapter_url), start=1):
            SaveImg(img_url, path + "/" + name + "/" + str(page))


def SaveImgIntoDatabase(url, path, mdb):
    """
    Crawl a comic and store every page image as a BLOB row in a MySQL table.

    :param url: (str) URL of the comic's table-of-contents page
    :param path: (str) name of the MySQL table the images are stored in
    :param mdb: (dict) MySQL connection info with keys
        "host", "user", "password", "database"
    :return: None (side effect: writes rows to MySQL)
    """
    import pymysql
    # pymysql >= 1.0 removed positional connect() arguments - use keywords.
    db = pymysql.connect(host=mdb["host"], user=mdb['user'],
                         password=mdb['password'], database=mdb['database'])
    cur = db.cursor()

    # MySQL identifiers (table names) cannot be bound as query parameters;
    # the original cur.execute(SQL, path) produced `'name'` - invalid SQL.
    # Interpolate the identifier directly, escaping embedded backticks.
    table = "`" + path.replace("`", "``") + "`"
    create_sql = """
    CREATE TABLE IF NOT EXISTS {} (
    `chapter` VARCHAR(100),
    `p`       TINYINT,
    `img`     mediumblob
    );
    """.format(table)
    insert_sql = """
    INSERT INTO {}(`chapter`,`p`,`img`)
    VALUES(%s,%s,%s)
    """.format(table)

    def _insert(data):
        """Insert one page row; roll back and report on failure so the crawl continues."""
        try:
            cur.execute(insert_sql, (data["chapter"], data["p"], data["img"]))
            db.commit()
        except pymysql.err.DatabaseError as dbe:
            db.rollback()
            print(dbe)
        except Exception as exc:  # narrower than the original BaseException
            db.rollback()
            print(exc)

    try:
        # Create the table once up front, not once per downloaded image.
        try:
            cur.execute(create_sql)
            db.commit()
        except Exception as exc:
            db.rollback()
            print(exc)

        for chapter in GetChapterURL(url):
            for p, imgurl in enumerate(GetImgURL(chapter[1]), start=1):
                img = request.urlopen(imgurl).read()
                print(chapter[0])
                _insert({"chapter": chapter[0], "p": p, "img": img})
    finally:
        # Always release the cursor and connection, even if the crawl fails.
        cur.close()
        db.close()


if __name__ == "__main__":
    # Table of contents for the comic to download.
    # Example chapter page: https://www.mkzhan.com/209657/506796.html
    url = "https://www.mkzhan.com/209657/"

    # To store the images in MySQL instead, build a connection dict and
    # call SaveImgIntoDatabase, e.g.:
    # mdb = {
    #     "host": "localhost",
    #     "user": "aloha",
    #     "password": "root",
    #     "database": "Comics"
    # }
    # SaveImgIntoDatabase(url, path, mdb)

    # `os` is already imported at the top of the file; the original
    # re-imported it here and also assigned `path` twice (the first
    # assignment was dead code).
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        "关于我转生成为史莱姆这档事")
    print(path)
    ComicsFile(path, url)
    print("finish")