# -*- coding: utf-8 -*-

import  sys
import os
import urllib.request
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
import json
import time
import string

import os




class Log(object):
    """Tiny file logger: one file per day and per level under ``Log.logdir``.

    Set ``Log.logdir`` before the first call; files are named
    ``YYYY-MM-DD.<level>`` (e.g. ``2020-01-31.error``).
    """
    # Directory all log files are written to; assigned by the caller.
    logdir = ""

    @staticmethod
    def _write(sfile, sstr):
        """Append one timestamped line describing *sstr* to logdir/sfile."""
        sfile = os.path.join(Log.logdir, sfile)
        if not isinstance(sstr, str):
            sstr = str(sstr)
        # Frame 0 is _write itself and frame 1 the debug/error/info/warning
        # wrapper; frame 2 is the code that actually issued the log call.
        # The original used frame 0, so the location field always showed
        # "_write" and the same line number, which was useless.
        try:
            caller = sys._getframe(2)
        except ValueError:  # called directly with no frame that deep
            caller = sys._getframe()
        with open(sfile, "a+", encoding="utf-8") as f:
            ctime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            f.write("%s [%s,%s,%d]: %s\n" % (ctime,
                                             caller.f_code.co_filename,
                                             caller.f_code.co_name,
                                             caller.f_lineno,
                                             sstr))

    @staticmethod
    def debug(sstr):
        """Log *sstr* at debug level."""
        Log._write("%s.debug" % time.strftime("%Y-%m-%d", time.localtime()), sstr)

    @staticmethod
    def error(sstr):
        """Log *sstr* at error level."""
        Log._write("%s.error" % time.strftime("%Y-%m-%d", time.localtime()), sstr)

    @staticmethod
    def info(sstr):
        """Log *sstr* at info level."""
        Log._write("%s.info" % time.strftime("%Y-%m-%d", time.localtime()), sstr)

    @staticmethod
    def warning(sstr):
        """Log *sstr* at warning level."""
        Log._write("%s.warning" % time.strftime("%Y-%m-%d", time.localtime()), sstr)

def del_dir_tree(path):
    """Recursively delete *path*: a file is removed, a directory is emptied
    bottom-up and then removed. Failures are printed and logged, not raised."""
    if os.path.isdir(path):
        # Empty the directory first, then drop the (now empty) directory.
        for entry in os.listdir(path):
            del_dir_tree(os.path.join(path, entry))
        try:
            os.rmdir(path)
        except Exception as err:
            Log.error(err)
            print(err)
    elif os.path.isfile(path):
        try:
            os.remove(path)
        except Exception as err:
            print(err)
            Log.error(err)

class SongObj(object):
    """One song entry parsed from a JS ``{key:value, ...}`` object literal
    taken out of a page's ``var mlist = [...]`` array."""

    def __init__(self):
        self.id = 0           # numeric song id
        self.name = ""        # file-name stem used when saving
        self.target_url = ""  # direct download url (the 'mp3' field)
        self.ownername = ""   # album/cover this song belongs to
        self.title = ""
        self.singer = ""
        self.suffix = ""      # file extension derived from target_url

    def getsuffix(self):
        """Set self.suffix to the text after the last '.' in target_url."""
        parts = self.target_url.split(".")
        if len(parts) > 1:
            self.suffix = parts[-1]

    def parse(self, sstr=""):
        """Parse one object literal; return True when the entry is usable
        (has a url, a name and a recognizable file extension)."""
        sstr = sstr.replace("{", "").replace("}", "")
        for item in sstr.split(","):
            sp = item.split(":")
            if len(sp) < 2:
                continue
            key = sp[0].replace(" ", "")
            value = sp[1]
            # A url value is torn apart by the ':' split above; rejoin every
            # remaining piece. The original tested find("http") > 0, which
            # missed a url sitting exactly at offset 0 and then indexed
            # sp[2] unconditionally (IndexError when no extra ':' existed).
            # Joining sp[1:] also keeps urls that contain a port intact.
            if "http" in value and len(sp) >= 3:
                value = ":".join(sp[1:])
            value = value.replace('"', "").replace(" ", "").replace("'", "")
            if key == "name":
                self.name = value
            elif key == "id":
                self.id = int(value)
            elif key == "mp3":
                self.target_url = value
            elif key == "title":
                self.title = value
            elif key == "singer":
                self.singer = value
        self.getsuffix()
        # Return an actual bool (the original returned an int length).
        return bool(self.target_url and self.name and self.suffix)

    def toString(self):
        return "ower = %s,id=%d,title=%s,name=%s,url=%s" % (self.ownername, self.id, self.title, self.name, self.target_url)

class SongCover(object):
    """One album/category page: fetches its html, extracts the JS
    ``var mlist = [...]`` song array and downloads every song in it."""

    def __init__(self, id=0, url="", name="", owner=""):
        self.id = id
        self.target_url = url  # page url (reused later for each song url)
        self.name = name       # album name, used as the directory name
        self.song_list = []    # SongObj entries parsed from the page
        self.owner = owner     # id of the wrapper section on the index page

    def urlopen(self):
        """Open self.target_url and keep the handle; False on failure."""
        try:
            # Quote non-ascii characters so urls containing Chinese text work.
            self.target_url = urllib.parse.quote(self.target_url, safe=string.printable)
            self.url_read_handle = urllib.request.urlopen(self.target_url)
            return True
        except Exception as e:
            errmsg = "open url <%s> falied,because=%s" % (self.target_url, e)
            print(errmsg)
            Log.error(errmsg)
            return False

    def getVarMlist(self, scripts):
        """Return every 'var mlist = [...]' statement found in *scripts*
        (an iterable of <script> tags)."""
        found = []
        pattern = re.compile(r'var mlist = \[.*\]')
        for tag in scripts:
            text = tag.get_text()
            # was find(...) > 0: missed a declaration at offset 0, and the
            # inner loop reused the outer loop variable name.
            if text.find("var mlist") >= 0:
                found.extend(pattern.findall(text))
        return found

    def parseUrl(self):
        """Fetch the album page and fill song_list; False on network error."""
        print(self.toString())
        if not self.urlopen():
            return False
        data = self.url_read_handle.read()
        soup = BeautifulSoup(data.decode("utf-8"), "html.parser")
        scripts = soup.find_all('script', type="text/javascript")
        for statement in self.getVarMlist(scripts):
            parts = statement.split('=')
            if len(parts) < 2:
                continue
            # was named 'str', shadowing the builtin
            payload = parts[1].replace('[', '').replace(']', '')
            # Walk the '{...},{...}' objects one at a time.
            while len(payload) > 2:
                start = payload.find('{')
                end = payload.find('}') + 1
                song_text = payload[start:end]
                payload = payload[end:]
                song = SongObj()
                song.ownername = self.name
                if song.parse(song_text):
                    self.song_list.append(song)
                    print(song.toString())
                    Log.info(song.toString())
        return True

    def toString(self):
        return "id=%d,url=%s ,name=%s" % (self.id, self.target_url, self.name)

    def download(self, ttop):
        """Download every song in song_list into ttop/<album name>/, after
        writing a 'songlist' manifest. Per-song failures are logged and
        skipped."""
        topdir = os.path.join(ttop, self.name)
        if not os.path.exists(topdir):
            os.makedirs(topdir)

        songlistfile = os.path.join(topdir, "songlist")
        if os.path.exists(songlistfile):
            os.remove(songlistfile)
        with open(songlistfile, "a+") as f:
            for song in self.song_list:
                f.write(song.toString())
                f.write("\n")

        for song in self.song_list:
            songname = os.path.join(topdir, song.name + "." + song.suffix)
            print(songname)
            self.target_url = song.target_url
            if not self.urlopen():
                Log.error(song.toString())
                Log.error("")
                continue
            tdata = self.url_read_handle.read()
            try:
                # 'with' closes the file even on a write error (the original
                # used a manual try/finally close).
                with open(songname, "wb+") as out:
                    out.write(tdata)
            except Exception as e:
                errmsg = "open file failed<%s>:%s" % (songname, e)
                print(errmsg)
                Log.error(errmsg)


class Reptile(object):
    """Entry-point crawler: opens the site index, builds one SongCover per
    album link found in the 'wrapper1'/'wrapper2' sections, then downloads
    everything."""

    def __init__(self):
        self.target_url = "http://mp3.61ertong.com/"
        self.url_read_handle = None
        self.subLinkTopList = []
        # was "D:\download": '\d' is an invalid escape sequence (SyntaxWarning
        # on modern Python). Same runtime value, properly escaped.
        self.downloaddir = "D:\\download"

    def urlopen(self):
        """Open self.target_url and keep the handle; False on failure."""
        try:
            # Quote non-ascii characters so urls containing Chinese text work.
            self.target_url = urllib.parse.quote(self.target_url, safe=string.printable)
            self.url_read_handle = urllib.request.urlopen(self.target_url)
            return True
        except Exception as e:
            errinfo = "open url <%s> falied,because=%s" % (self.target_url, e)
            print(errinfo)
            Log.error(errinfo)
            return False

    def _collectCovers(self, soup, wrapper_id, num):
        """Append a SongCover for each link in the section *wrapper_id*;
        return the next free sequence number."""
        section = soup.find(id=wrapper_id)
        for item in section:
            if len(item) > 1:
                self.subLinkTopList.append(
                    SongCover(num, item.a.get('href'), item.a.get_text(), wrapper_id))
                num = num + 1
        return num

    def urlreadAndParse1(self):
        """Parse the index page into subLinkTopList and fetch each album."""
        data = self.url_read_handle.read()
        soup = BeautifulSoup(data.decode("utf-8"), "html.parser")
        self.subLinkTopList.clear()
        num = self._collectCovers(soup, "wrapper1", 0)
        self._collectCovers(soup, "wrapper2", num)
        for cover in self.subLinkTopList:
            # was Log.info(i.parseUrl): logged the bound-method object
            # itself instead of anything describing the cover.
            Log.info(cover.toString())
            cover.parseUrl()

    def download(self):
        """Download every collected album under downloaddir/<owner>/."""
        for cover in self.subLinkTopList:
            tpath = os.path.join(self.downloaddir, cover.owner)
            if not os.path.exists(tpath):
                os.makedirs(tpath)
            cover.download(tpath)

    def test(self):
        """Run the full crawl: open index, parse albums, download songs."""
        if not self.urlopen():
            return False
        self.urlreadAndParse1()
        self.download()
if __name__ == "__main__":
    print("nihao")
    t= Reptile()
    Log.logdir = t.downloaddir
    if os.path.exists(t.downloaddir):
            del_dir_tree(t.downloaddir)
    os.makedirs(t.downloaddir)
    t.test()