#!/usr/bin/python
# -*-coding:utf-8-*-
import random
import time
from _socket import timeout
from urllib.error import HTTPError
from urllib.request import urlopen
from urllib.parse import unquote, quote
from urllib.request import Request
from urllib import parse
from bs4 import BeautifulSoup as bs
import pymysql.cursors
import os.path

from urllib3.util import timeout


class music_51ape:
    """Value object holding the metadata of one track scraped from 51ape.com.

    All fields default to None so partially-filled records can be built
    incrementally while parsing a page.
    """

    def __init__(self, id=None, title=None, singer=None, suffix=None, url=None, password=None,
                 size=None, release_date=None,
                 lyrics=None,
                 flag=None,
                 download_url=None,
                 original_url=None):
        # Copy every constructor argument onto the instance unchanged.
        self.id, self.title, self.singer = id, title, singer
        self.suffix, self.url, self.password = suffix, url, password
        self.size, self.release_date, self.lyrics = size, release_date, lyrics
        self.flag, self.download_url, self.original_url = flag, download_url, original_url


def replace(ma):
    """Upsert one music_51ape record into `music_info` via REPLACE INTO.

    :param ma: music_51ape instance whose id/title/singer/suffix/url/
               password/size/release_date/lyrics fields are persisted.

    On any database error the transaction is rolled back and the error
    printed (best-effort semantics, matching the caller's retry loop);
    the connection is always closed.
    """
    # 获取数据连接
    connection = pymysql.connect(host="127.0.0.1", user="root", password="root", db="common", charset="utf8mb4")
    try:
        # FIX: use the cursor as a context manager so it is closed even on
        # error (the original leaked the cursor).
        with connection.cursor() as cursor:
            # REPLACE INTO = delete-then-insert on duplicate primary key.
            sql = "replace into `music_info`(`id`,`title`,`singer`,`suffix`,`url`,`password`,`size`,`release_date`,`lyrics`) " \
                  "values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            cursor.execute(sql, (
                ma.id, ma.title, ma.singer, ma.suffix, ma.url, ma.password, ma.size, ma.release_date,
                ma.lyrics))
        # 提交
        connection.commit()
    except Exception as e:
        print(e)
        # 错误回滚
        connection.rollback()
    finally:
        connection.close()


def user_proxy(proxy_addr, url):
    """Fetch *url* through the given HTTP proxy and return the decoded body.

    :param proxy_addr: proxy endpoint as "host:port".
    :param url: page to download.
    :return: response body decoded as UTF-8.

    NOTE(review): install_opener mutates process-global urllib state, so
    every later urlopen() in this process goes through this proxy too.
    """
    import urllib.request
    print(proxy_addr)
    handler = urllib.request.ProxyHandler({'http': proxy_addr})
    opener = urllib.request.build_opener(handler, urllib.request.HTTPHandler)
    urllib.request.install_opener(opener)
    response = urllib.request.urlopen(url, timeout=5)
    return response.read().decode('utf-8')


def get_music_in_order(start, end):
    """Scrape http://www.51ape.com/ape/<id>.html for every id in
    range(start, end), parse the track metadata and upsert it via replace().

    :param start: first numeric page id (inclusive).
    :param end: last numeric page id (exclusive).

    A 404 skips that id and continues; any other failure (HTTP 5xx/403,
    timeout, parse error) retries by recursing from the failing id.
    NOTE(review): each retry adds a stack frame, so a very long run of
    consecutive failures could eventually raise RecursionError.
    """
    # Proxy pool for user_proxy(); currently unused — direct urlopen below.
    proxy_addr_arr = ["210.1.58.201:8080",
                      "83.64.224.70:21776",
                      # "37.147.124.188:51500",
                      # "119.179.170.186:8060"
                      ]
    for x in range(start, end):
        url = "http://www.51ape.com/ape/%s.html" % x

        # 记录起始时间 — per-page timing, printed in `finally`.
        start_time = time.time()
        seconds, minutes, hours = 0, 0, 0
        try:
            response = urlopen(url, timeout=5)
            page = response.read().decode("utf-8")
            # page = user_proxy(random.choice(proxy_addr_arr), url)
            soup = bs(page, "html.parser")
            ma = music_51ape()
            ma.id = x
            # Singer/title live in the breadcrumb list.
            for child in soup.select("div ul[class='b_b_s over']"):
                ma.singer = child.select_one("a[class='fl c3b ml_1 mt_08']").get("title")
                ma.title = child.select_one("li[class='fl ml_1 mt_08 c999']").text
            # File extension from the page heading, e.g. "xxx.ape" -> "ape".
            ma.suffix = soup.select_one("h1[class='yh mt_1 f_32']").text.split(".")[-1][:6]
            ma.size = soup.select("div[class='fl over w638'] h3")[2].text[1:][:8]
            ma.release_date = soup.select("div[class='fl over w638'] h3")[4].text[1:]
            ma.url = soup.select_one("div[class='fl over w638'] a[class='blue a_none']").get("href")
            # Extraction password follows a full-width colon "：".
            ma.password = soup.select_one("div[class='fl over w638'] b[class='mt_1 yh d_b']").text.split("：")[-1]
            ma.lyrics = soup.select_one("div#newstext_2").text
            replace(ma)
            print((ma.id, ma.title, ma.singer, ma.suffix, ma.url, ma.password, ma.size,
                   ma.release_date))
        except HTTPError as http:
            if http.msg == "Not Found":
                # 404 — the id simply does not exist; move on.
                print("ID：%s %s 网页获取失败1：%s" % (x, url, http.msg,))
                continue
            # Any other HTTP error (500, 403, ...): retry from this id.
            # (The original had two byte-identical branches for this case.)
            print("\033[31mID：%s %s 网页获取失败1：%s\033[0m" % (x, url, http.msg))
            get_music_in_order(x, end)
            break
        except ConnectionResetError as cre:
            # FIX: this clause was unreachable before because the broader
            # `except BaseException` preceded it; it must come first.
            print("\033[31mID：%s %s ConnectionResetError：%s\033[0m" % (x, url, cre))
            get_music_in_order(x, end)
            break
        except BaseException as es:
            # Catch-all (socket timeout, parse error, ...): retry from x.
            print("\033[31mID：%s %s 网页获取失败2：%s\033[0m" % (x, url, es))
            get_music_in_order(x, end)
            break
        finally:
            # 计算时间差值 — elapsed seconds for this page.
            seconds = int(time.time() - start_time)
            # hours/minutes stay 0; the full breakdown was left disabled.
            # hours = seconds // 3600
            # minutes = (seconds - hours * 3600) // 60
            # seconds = seconds - hours * 3600 - minutes * 60
            print("耗时：{:>02d}:{:>02d}:{:>02d}".format(hours, minutes, seconds))


def init_music(ma):
    """Insert a stub row for *ma* into `music_info` (flag=0, to be filled in
    later by get_music_url_by_init), or refresh title/singer/original_url
    if a row with id "51ape_<id>" already exists.

    :param ma: music_51ape with at least id, title, singer, original_url set.
    """
    # 获取数据连接
    connection = pymysql.connect(host="127.0.0.1", user="root", password="root", db="common", charset="utf8mb4")
    try:
        with connection.cursor() as cursor:
            # Does the row already exist?
            sql = "SELECT `id`,`flag` FROM `music_info` WHERE id=%s"
            # FIX: pass parameters as a tuple, not a bare string.
            cursor.execute(sql, ("51ape_%s" % ma.id,))
            results = cursor.fetchall()
            if len(results) == 0:
                # New track: insert with flag=0 (details not yet fetched).
                sql = "INSERT into `music_info`(`id`,`title`,`singer`,`flag`,`original_url`) " \
                      "values(%s,%s,%s,%s,%s)"
                cursor.execute(sql, ("51ape_%s" % ma.id, ma.title, ma.singer, 0, ma.original_url))
            else:
                # FIX: the backtick was misplaced (`original_url=%s`), which
                # quoted the whole expression as a column name — the UPDATE
                # always failed and the error was silently swallowed.
                sql = "update `music_info` set `title`=%s,`singer`=%s,`original_url`=%s where `id`=%s"
                cursor.execute(sql, (ma.title, ma.singer, ma.original_url, "51ape_%s" % ma.id))
        # 提交
        connection.commit()
    except Exception as e:
        # FIX: report the failure instead of swallowing it silently.
        print(e)
        # 错误回滚
        connection.rollback()
    finally:
        connection.close()


def get_music_by_theme_page(theme, page):
    """Scrape one listing page of a theme and register each track found
    via init_music().

    :param theme: theme path segment, e.g. "apeyinyue" (URL-quoted here).
    :param page: 1-based page number; page 1 uses "index.html".
    """
    if page == 1:
        url = "http://www.51ape.com/%s/index.html" % quote(theme)
    else:
        url = "http://www.51ape.com/%s/index_%s.html" % (quote(theme), page)
    print("\033[36m theme_page = %s\033[0m" % url)
    html = urlopen(url, timeout=5).read().decode("utf-8")
    soup = bs(html, "html.parser")
    for item in soup.select("ul li[class='blk_nav lh30 over']"):
        anchor = item.select_one("a")
        record = music_51ape()
        record.original_url = anchor.get("href")
        # The id is the filename without the trailing ".html".
        record.id = record.original_url.split("/")[-1][0:-5]
        record.title = anchor.text
        init_music(record)
        print(record.id, record.title, record.singer, record.original_url)


def get_music_by_theme(theme, start_page=43):
    """Scrape every listing page of a theme, from *start_page* up to the
    last page advertised by the pagination bar.

    :param theme: theme path segment, e.g. "apeyinyue".
    :param start_page: first page to fetch.  GENERALIZED: this was a
        hard-coded 43 — presumably a leftover resume point; kept as the
        default for backward compatibility, but callers can now pass 1
        to scan the whole theme.
    """
    url = "http://www.51ape.com/%s/" % quote(theme)
    response = urlopen(url, timeout=5)
    page = response.read().decode("utf-8")
    soup = bs(page, "html.parser")
    # The last pagination link looks like ".../index_<N>.html"; N = page count.
    pages = soup.select("div.mt_1.listpage.b_t_d.b_b_d.lh50 a")[-1].get("href").split("_")[1][0:-5]
    for x in range(start_page, int(pages) + 1):
        get_music_by_theme_page(theme, x)


def get_music_by_singer(singer):
    """Query the site's singer search endpoint and register every listed
    track via init_music().

    :param singer: singer name or artist-page slug (URL-quoted here).
    """
    url = "http://www.51ape.com/skin/ape/php/qx_2.php?qx=%s" % quote(singer)
    html = urlopen(url, timeout=5).read().decode("utf-8")
    soup = bs(html, "html.parser")
    for item in soup.select("div[class='w260 wd m mt_1 over'] li.lh30"):
        anchor = item.select_one("a")
        record = music_51ape()
        record.original_url = anchor.get("href")
        # The id is the filename without the trailing ".html".
        record.id = record.original_url.split("/")[-1][0:-5]
        record.title = anchor.text
        record.singer = singer
        record.flag = 0
        init_music(record)
        print(record.id, record.title, record.singer, record.original_url)


def get_singers():
    """Walk the artist index page and scrape each singer's tracks.

    Artists with a dedicated page are queried by their URL slug; those
    whose link is "javascript:void(0)" are queried by display name.
    Failures are printed and the walk aborts (best-effort).
    """
    url = "http://www.51ape.com/artist/"
    try:
        response = urlopen(url, timeout=5)
        page = response.read().decode("utf-8")
        soup = bs(page, "html.parser")
        for child in soup.select("div[class='gs_a']"):
            href = child.select_one("a").get("href")
            if href == "javascript:void(0)":
                # No dedicated page: search by the displayed singer name.
                get_music_by_singer(child.select_one("a span").text)
            else:
                # Use the artist page's path segment as the query.
                get_music_by_singer(href.split("/")[-2])
    except ConnectionResetError as cre:
        # FIX: this clause was unreachable before because the broader
        # `except BaseException` preceded it; it must come first.
        print("\033[31mID： %s ConnectionResetError：%s\033[0m" % (url, cre))
    except BaseException as es:
        print("\033[31mID：%s 网页获取失败2：%s\033[0m" % (url, es))


def update_url_music(ma):
    """Insert or refresh a `music_info` row for *ma*.

    NOTE(review): this is a near-duplicate of init_music(); consider
    consolidating once both call sites are confirmed.

    :param ma: music_51ape with at least id, title, singer, original_url set.
    """
    # 获取数据连接
    connection = pymysql.connect(host="127.0.0.1", user="root", password="root", db="common", charset="utf8mb4")
    try:
        with connection.cursor() as cursor:
            # Does the row already exist?
            sql = "SELECT `id`,`flag` FROM `music_info` WHERE id=%s"
            # FIX: pass parameters as a tuple, not a bare string.
            cursor.execute(sql, ("51ape_%s" % ma.id,))
            results = cursor.fetchall()
            if len(results) == 0:
                # New track: insert with flag=0 (details not yet fetched).
                sql = "INSERT into `music_info`(`id`,`title`,`singer`,`flag`,`original_url`) " \
                      "values(%s,%s,%s,%s,%s)"
                cursor.execute(sql, ("51ape_%s" % ma.id, ma.title, ma.singer, 0, ma.original_url))
            else:
                # FIX: the backtick was misplaced (`original_url=%s`), which
                # quoted the whole expression as a column name — the UPDATE
                # always failed and the error was silently swallowed.
                sql = "update `music_info` set `title`=%s,`singer`=%s,`original_url`=%s where `id`=%s"
                cursor.execute(sql, (ma.title, ma.singer, ma.original_url, "51ape_%s" % ma.id))
        # 提交
        connection.commit()
    except Exception as e:
        # FIX: report the failure instead of swallowing it silently.
        print(e)
        # 错误回滚
        connection.rollback()
    finally:
        connection.close()


def get_music_url_by_init():
    """
    从数据库中获取未获得地址的歌曲并获取地址

    Fetch every `music_info` row with flag=0, download its original_url
    page, parse the download link / password / metadata, and update the
    row with flag=1.  A 404 skips the row; any other failure retries by
    recursing over the whole remaining flag=0 set.
    NOTE(review): each retry adds a stack frame, so persistent failures
    could eventually raise RecursionError.
    :return: None
    """
    # 获取数据连接
    connection = pymysql.connect(host="127.0.0.1", user="root", password="root", db="common", charset="utf8mb4")
    cursor = connection.cursor()
    try:
        # All tracks still waiting for their download details.
        sql = "SELECT `id`,`original_url`,`flag` FROM `music_info` WHERE `flag`=0"
        cursor.execute(sql)
        results = cursor.fetchall()
        for row in results:
            id = row[0]
            url = row[1]
            # 记录起始时间 — per-row timing, printed in `finally`.
            start_time = time.time()
            seconds, minutes, hours = 0, 0, 0
            try:
                response = urlopen(url, timeout=5)
                page = response.read().decode("utf-8")
                # page = user_proxy(random.choice(proxy_addr_arr), url)
                soup = bs(page, "html.parser")
                ma = music_51ape()
                ma.id = id
                # Singer/title live in the breadcrumb list.
                for child in soup.select("div ul[class='b_b_s over']"):
                    ma.singer = child.select_one("a[class='fl c3b ml_1 mt_08']").get("title")
                    ma.title = child.select_one("li[class='fl ml_1 mt_08 c999']").text
                ma.suffix = soup.select_one("h1[class='yh mt_1 f_32']").text.split(".")[-1][:6]
                ma.size = soup.select("div[class='fl over w638'] h3")[2].text[1:][:8]
                ma.release_date = soup.select("div[class='fl over w638'] h3")[4].text[1:]
                ma.url = soup.select_one("div[class='fl over w638'] a[class='blue a_none']").get("href")
                # Extraction password follows a full-width colon "：".
                ma.password = soup.select_one("div[class='fl over w638'] b[class='mt_1 yh d_b']").text.split("：")[-1]
                ma.lyrics = soup.select_one("div#newstext_2").text
                print((ma.id, ma.title, ma.singer, ma.suffix, ma.url, ma.password, ma.size,
                       ma.release_date))
                # Mark the row as fully fetched (flag=1).
                sql = "update `music_info` set `title`=%s,`singer`=%s," \
                      "`suffix`=%s,`url`=%s,`password`=%s,`size`=%s,`release_date`=%s,flag=1 where `id`=%s"
                cursor.execute(sql, (ma.title, ma.singer, ma.suffix, ma.url, ma.password, ma.size,
                                     ma.release_date, ma.id))
                # 提交
                connection.commit()
            except HTTPError as http:
                if http.msg == "Not Found":
                    # 404 — the source page is gone; skip this row.
                    print("ID：%s %s 网页获取失败1：%s" % (id, url, http.msg,))
                    continue
                # Any other HTTP error (500, 403, ...): restart the scan.
                # (The original had two byte-identical branches for this case.)
                print("\033[31mID：%s %s 网页获取失败1：%s\033[0m" % (id, url, http.msg))
                get_music_url_by_init()
                break
            except ConnectionResetError as cre:
                # FIX: this clause was unreachable before because the broader
                # `except BaseException` preceded it; it must come first.
                print("\033[31mID：%s %s ConnectionResetError：%s\033[0m" % (id, url, cre))
                break
            except BaseException as es:
                # Catch-all (socket timeout, parse error, ...): restart.
                print("\033[31mID：%s %s 网页获取失败2：%s\033[0m" % (id, url, es))
                get_music_url_by_init()
                break
            finally:
                # 计算时间差值 — elapsed seconds for this row.
                seconds = int(time.time() - start_time)
                # hours/minutes stay 0; the full breakdown was left disabled.
                # hours = seconds // 3600
                # minutes = (seconds - hours * 3600) // 60
                # seconds = seconds - hours * 3600 - minutes * 60
                print("耗时：{:>02d}:{:>02d}:{:>02d}".format(hours, minutes, seconds))
    except Exception as e:
        # FIX: report the failure instead of swallowing it silently.
        print(e)
        # 错误回滚
        connection.rollback()
    finally:
        connection.close()


# NOTE(review): "__main1__" can never equal __name__, so this whole block is
# dead code — presumably the guard was renamed to quickly disable the
# DB-driven run while keeping the call list around.
if __name__ == "__main1__":
    # get_singers()
    # get_music_by_theme("zhuanji")
    # get_music_by_theme("apeyinyue")
    # get_music_by_theme("flacyinyue")
    # get_music_by_theme("wavyinyue")
    # get_music_by_theme("yueyu")
    # get_music_by_theme("yingwen")
    # get_music_by_theme("经典歌曲")
    get_music_url_by_init()

# Active entry point: crawl track pages sequentially by numeric id,
# starting from a manually-maintained resume point.
if __name__ == "__main__":
    i = 168500  # 168387  79325
    get_music_in_order(i, 200000)
