# @author:Wei Junjie
# @time:2024/5/16 23:21
# @file spider01.py
# --*--coding: utf-8 --*--
"""
https://b.aqdyim.com/ 爬取磁力链接和图片的实现

"""
import random
from urllib import request, parse
import re, time, os, csv
from fake_useragent import UserAgent
import pymysql
from pymongo import MongoClient

# Root of the sibling "sourceStore" tree (<repo>/sourceStore), resolved
# relative to this file's parent directory.  Hoisted once instead of
# repeating the dirname(dirname(realpath(__file__))) expression per constant.
_STORE_ROOT = os.path.join(
    os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'sourceStore')

IMG_SAVE_PATH = os.path.join(_STORE_ROOT, 'IMG', 'blue_actress')    # downloaded cover images
HTML_PATH_SAVE = os.path.join(_STORE_ROOT, 'HTML', 'blue_actress')  # raw saved search pages
TEXT_PATH_SAVE = os.path.join(_STORE_ROOT, 'TEXT', 'blue_actress')  # CSV exports
LOG_PATH_SAVE = os.path.join(_STORE_ROOT, 'LOG', 'blue_actress')    # error logs
VIDEO_PATH_SAVE = os.path.join(_STORE_ROOT, 'VIDEO')


def quchong(ls):
    """Deduplicate *ls*, keeping the LAST occurrence of each element.

    E.g. [a, b, a, c] -> [b, a, c].  Replaces the original recursive
    version, which both sliced the list on every call (O(n^2)) and blew
    Python's recursion limit on result lists longer than ~1000 entries.
    Elements must be hashable (here: tuples of strings from re.findall).

    :param ls: sequence of hashable items
    :return: new list with duplicates removed, last occurrence kept
    """
    seen = set()
    result = []
    # Walk backwards so the *last* occurrence is the one we keep,
    # then restore original relative order.
    for item in reversed(ls):
        if item not in seen:
            seen.add(item)
            result.append(item)
    result.reverse()
    return result


class Love_Movie_Spider:
    """Interactive scraper for https://b.aqdyim.com/ search results.

    Driven by the menu in main(): it can save raw search-result HTML,
    download cover images, or walk each result's detail page extracting
    the title, magnet ("BT") link and post time, persisting the batch to
    CSV, MySQL (Data_Sql) and MongoDB (Mongo).
    """

    def __init__(self):
        # Search URL template: page number and GBK-percent-encoded keyword
        # are filled in via str.format().
        self.url = 'https://b.aqdyim.com/search.asp?page={}&searchword={}&searchtype=-1'

    def get_header(self):
        """Build request headers: random User-Agent plus a fake Baidu
        analytics cookie (Hm_lpvt_*) stamped with the current epoch time."""
        ua = UserAgent()
        headers = {'User-Agent': ua.random,
                   'cookie': 'Hm_lpvt_965b01a2c8840a1c2931af2f317f0edd=%s' % (int(time.time()))}
        return headers

    # Fetch the response body for a URL
    def search_actionMovie(self, url: str) -> str:
        """GET *url* with spoofed headers and return the body decoded as
        GBK (the site's encoding).  Raises on network errors / timeout."""
        headers = self.get_header()
        req = request.Request(url, headers=headers)
        res = request.urlopen(req, timeout=60)
        html = res.read().decode('gbk')
        return html

    # Save an HTML file to disk
    def save_html(self, filename: str, html) -> None:
        """Write *html* text to *filename*.

        NOTE(review): no explicit encoding, so the platform default is
        used -- may raise UnicodeEncodeError for this GBK-decoded content
        on some systems; confirm.
        """
        with open(filename, 'w') as f:
            f.write(html)

    # Collect or download cover images
    def get_picture(self, filename: str, content, ways='download'):
        """Find cover-image URLs in *content*.

        ways='download': download each image into IMG_SAVE_PATH, naming it
                         '<filename>-<timestamp>.jpg' (returns None).
        ways='save':     return the list of matched image URLs instead.
        """
        # NOTE(review): not a raw string; '\S' happens to survive as-is but
        # r'...' would be safer for regex literals.
        pattern = 'https://c.aqdypic.com/pic/uploadimg/\S+/\S+.jpg'
        url_list = re.findall(pattern, content)
        if ways == "download":
            for url in url_list:
                timer = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(time.time()))
                temp_name = os.path.join(IMG_SAVE_PATH,filename + '-' + timer + '.jpg')
                request.urlretrieve(url, temp_name)
                # randomized delay to avoid hammering the image host
                time.sleep(random.randint(1, 5))

        elif ways == "save":
            return url_list

    # Extract and persist magnet links
    def get_BT(self, content: str, name: str, picture_list, page, db,mongodb) -> None:
        """Walk every detail-page link found in *content*; for each, scrape
        title / magnet link / post time and download the matching cover
        image from *picture_list*, then persist the whole batch to CSV,
        MySQL (*db*, table *name*) and MongoDB (*mongodb*).

        :param content: search-result page HTML for page number *page*
        :param picture_list: image URLs, assumed index-aligned with the
            detail links -- TODO confirm the site guarantees this pairing
        """
        base_url = 'https://b.aqdyim.com/'
        pattern = '(/shechu/\w+)|(/shebao/\w+)|(/lusi/\w+)'
        url_list = quchong(re.findall(pattern, content))
        # Create the MySQL table for this actress if it does not exist yet
        db.create(name)

        sql_data = []
        execl_data = []
        mongo_datas=[]
        print(f"爬取第{page}页中......")
        for index, url in enumerate(url_list):
            # re.findall with alternated groups yields 3-tuples where two
            # entries are empty strings; join collapses them to the path.
            temp_url = "".join(url)
            try:
                # record for one detail page
                mongo_data={}
                req = self.search_actionMovie(base_url + temp_url)
                pattern1 = '<title>(.*)</title>'
                title = ''.join(re.search(pattern1, req).groups())
                pattern2 = 'GvodUrls =.*\("(.*?)"'
                bt_url=re.search(pattern2, req)
                if bt_url:
                    bt_cili = bt_url.group(1)
                else:
                    bt_cili='沒有可用的磁力鏈接'
                pattern3 = '<span id="addtime">(.*)</span>'
                add_time=re.search(pattern3,req).group(1)
                # normalize 'YYYY/MM/DD HH:MM:SS' -> 'YYYY-MM-DD HH:MM:SS'
                _date,daytime=add_time.split(' ')
                add_time='%s-%s-%s'%tuple(_date.split('/'))+' %s'%daytime
                # raw image bytes, stored as a MySQL blob / Mongo binary
                picture_data = request.urlopen(picture_list[index], timeout=60).read()

                print(title, bt_cili)

                mongo_data["title"] = title
                mongo_data['bt_cili'] = bt_cili
                mongo_data['picture'] = picture_data
                mongo_data['time']=add_time

                mongo_datas.append(mongo_data)
                sql_data.append(tuple([title, bt_cili, picture_data,add_time]))
                execl_data.append(tuple([title, bt_cili,add_time]))
                # randomized delay between detail-page requests
                time.sleep(random.randint(5, 10))
            except Exception as e:
                date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                print(f'爬取第{page}页失败')
                information = (date + '->' + str(e) + '\n' +
                               date + '->' + f'爬取{page}页失败\n')
                log_info(name, information)
                # NOTE(review): aborts the whole page on the first failed
                # item, silently discarding rows already collected above --
                # confirm this is intended rather than continue/partial save.
                return
        # persist to CSV (utf-8-sig so Excel opens it correctly)
        with open(os.path.join(TEXT_PATH_SAVE,name + '.csv'), 'a', encoding='utf-8-sig', newline='') as f:
            writer = csv.writer(f)
            writer.writerows(execl_data)

        # persist to MySQL
        db.insert(name, sql_data)

        # persist to MongoDB

        mongodb.dbInsert(mongo_datas)

    # Entry point: interactive menu loop
    def main(self):
        """Prompt-driven loop: 1=images, 2=magnet links, 3=raw HTML, 4=quit."""
        while True:
            # NOTE(review): a fresh MySQL connection is opened on EVERY loop
            # iteration but only closed in branch '2' -- confirm/clean up.
            db = Data_Sql()
            ways = input("请按数字提示输入命令".center(200, '*') + '\n'
                                                                   "1.获取图片:\n"
                                                                   "2.获取磁力链接:\n"
                                                                   "3.获取html源码:\n"
                                                                   "4.退出程序:\n")
            # branch 3: save raw search-result HTML pages
            if ways == '3':
                try:
                    # read user input
                    name = input("请输入你要搜索的演员名字:")
                    # NOTE(review): eval() on raw input executes arbitrary
                    # code -- int() would be safe and sufficient here.
                    start_page = eval(input("请输入起始页码:"))
                    end_page = eval(input("请输入终止页:"))
                    kw = parse.quote(name, encoding='gbk')
                    for index in range(start_page, end_page + 1):
                        url = self.url.format(index, kw)
                        html = self.search_actionMovie(url)

                        self.save_html(os.path.join(HTML_PATH_SAVE,"{}{}.html".format(name, index)), html)
                        print(f'正在获取第{index}页')
                        time.sleep(random.randint(5, 10))
                    print('获取成功')
                    # NOTE(review): returns (exits main) instead of looping
                    # back to the menu like the other branches -- confirm.
                    return
                except Exception as e:
                    print(e)

            elif ways == '1':
                try:
                    # read user input
                    name = input("请输入你要搜索的演员名字:")
                    start_page = eval(input("请输入起始页码:"))
                    end_page = eval(input("请输入终止页:"))
                    kw = parse.quote(name, encoding='gbk')
                    for index in range(start_page, end_page + 1):
                        url = self.url.format(index, kw)
                        html_content = self.search_actionMovie(url)
                        self.get_picture(name, html_content)
                except Exception as e:
                    print(e)

            elif ways == '2':
                name = input("请输入你要搜索的演员名字:")
                start_page = eval(input("请输入起始页码:"))
                end_page = eval(input("请输入终止页:"))
                kw = parse.quote(name, encoding='gbk')
                mongodb = Mongo('blue_actress', name)
                for index in range(start_page, end_page + 1):
                    url = self.url.format(index, kw)
                    html_content = self.search_actionMovie(url)
                    picture_list = self.get_picture(name, html_content, 'save')
                    self.get_BT(html_content, name, picture_list, index, db,mongodb)
                # close cursor and connection once crawling is finished
                db.cursor.close()
                db.conn.close()

            elif ways == '4':
                print('bye')
                break
            else:
                print("指令输入错误请重试")
                continue

class Data_Sql:
    """Thin pymysql wrapper for the 'blue_actress' database.

    One connection and one shared cursor are opened at construction time;
    callers (Love_Movie_Spider.main) close them explicitly.

    NOTE(review): host/user/password are hardcoded -- move to config/env.
    NOTE(review): table/column names are spliced into SQL via string
    formatting throughout; safe only while *name* comes from trusted
    input -- confirm, or whitelist identifiers.
    """

    def __init__(self):
        self.name = 'blue_actress'      # database name
        self.ip = '192.168.0.105'
        self.port = 3306
        self.user = 'root'
        self.password = '123456'
        self.conn = self.connect()
        # single shared cursor reused by most methods below
        self.cursor = self.conn.cursor()

    def connect(self):
        """Open and return a new pymysql connection to self.name."""
        conn = pymysql.connect(host=self.ip,
                               port=self.port,
                               user=self.user,
                               passwd=self.password,
                               db=self.name)
        return conn

    def insert(self, name, ls):
        """Bulk-insert rows into table *name*.

        :param ls: iterable of (title, bt, picture, add_time) tuples
        Commits on success, rolls back and prints the error on failure.
        """
        db = self.conn
        try:
            cursor = self.cursor
            # row values are parameterized (%s placeholders); only the
            # table name is string-spliced (see class NOTE).
            sql = 'insert into ' + name + '(title,bt,picture,add_time)' + ' value(%s,%s,%s,%s)'
            print(sql)
            cursor.executemany(sql, ls)
            db.commit()
        except Exception as e:
            db.rollback()
            print(e)

    def create(self, name):
        """Create table *name* (id, title, bt, picture blob, add_time)
        if it does not already exist."""
        db = self.conn
        try:
            # uses a fresh cursor rather than the shared one
            cursor = db.cursor()
            sql = 'create table if not exists ' + name + ('(id int primary key auto_increment, '
                                                          'title varchar(200),bt varchar(200),'
                                                          'picture blob,'
                                                          'add_time datetime default now()'
                                                          ');')
            cursor.execute(sql)
        except Exception as e:
            print(e)
            db.rollback()

    def search(self, name, data):
        """SELECT column(s) *data* from table *name*; return all rows,
        or None if the query failed (error is printed)."""
        try:
            SQL = 'select %s from %s;' % (data, name)
            self.cursor.execute(SQL)
            result = self.cursor.fetchall()
            return result
        except Exception as e:
            print(e)

    def search_data_page(self, name, data,page,per_page):
        """Paginated SELECT: return *per_page* rows of column(s) *data*
        from table *name*, for 1-based page number *page*."""
        page = int(page)
        per_page = int(per_page)
        # LIMIT offset is zero-based
        start_page=(page-1)*per_page
        try:
            SQL = 'select %s from %s limit %s,%s;' % (data, name,start_page,per_page)
            self.cursor.execute(SQL)
            result = self.cursor.fetchall()
            return result
        except Exception as e:
            print(e)



    def page_count(self,name):
        """Return a 1-tuple with the row count of table *name*
        (counts non-NULL titles), or None on error."""
        try:
            SQL = 'select count(title) from %s ;'%(name)
            self.cursor.execute(SQL)
            result = self.cursor.fetchone()
            return result
        except Exception as e:
            print(e)


    def get_tables_name(self):
        """Return all table names in the database (tuple of 1-tuples),
        or None on error."""
        try:
            SQL = 'show tables;'
            self.cursor.execute(SQL)
            result=self.cursor.fetchall()
            return result
        except Exception as e:
            print(e)

    # NOTE(review): method name is a typo ('tabel'); 'type' shadows the
    # builtin -- kept as-is, callers may depend on the current name.
    def alter_tabel(self, name, col,ways ,type):
        """ALTER TABLE *name* *ways* *col* *type* (e.g. ways='add',
        type='varchar(100)').  Commits on success, rolls back on error."""
        db=self.conn
        SQL = 'alter table %s %s %s %s;' % (name,ways, col, type)
        print(SQL)
        try:
            self.cursor.execute(SQL)
            db.commit()
        except Exception as e:
            db.rollback()
            print(e)

    def table_show(self):
        """Return all table names; unlike get_tables_name(), errors
        propagate to the caller."""
        SQL = 'show tables;'
        self.cursor.execute(SQL)
        res=self.cursor.fetchall()
        return res

class Mongo:
    """Thin wrapper around a single MongoDB collection on localhost:27017.

    Updated for PyMongo 4: the legacy Collection.insert() / update() /
    remove() methods were removed, so this class now uses insert_one /
    insert_many, update_one / replace_one and delete_many while keeping
    the original method names and call semantics.
    """

    def __init__(self, db, collection):
        self.client = MongoClient('localhost', 27017)
        self.db = self.client[db]
        self.collection = self.create_table(collection)

    def create_table(self, table):
        """Create collection *table* if missing and return the handle."""
        if table not in self.db.list_collection_names():
            self.db.create_collection(table)
        else:
            print("%s collection already exists" % table)
        return self.db[table]

    def dbInsert(self, dic):
        """Insert one document (dict) or many documents (list of dicts)."""
        print("insert........")
        if isinstance(dic, dict):
            # legacy Collection.insert() on a dict == insert_one
            self.collection.insert_one(dic)
        elif isinstance(dic, list):
            self.collection.insert_many(dic)

    def dbUpdate(self, dic, newdic):
        """Update the first document matching filter *dic*.

        Legacy Collection.update() replaced the whole document when
        *newdic* contained no '$' operators and applied the operators
        otherwise; dispatch to replace_one / update_one to preserve that.
        """
        print("update......")
        if any(key.startswith('$') for key in newdic):
            self.collection.update_one(dic, newdic)
        else:
            self.collection.replace_one(dic, newdic)

    def dbRemove(self, dic):
        """Delete every document matching filter *dic* (legacy remove()
        deleted all matches)."""
        print("removing......")
        self.collection.delete_many(dic)

    def dbSearch(self, key=None):
        """Return a cursor over documents matching filter *key*
        (all documents when key is None)."""
        print("searching ......")
        data = self.collection.find(key)
        return data


def log_info(name, info):
    """Append *info* to the per-actress log file LOG_PATH_SAVE/<name>.txt.

    Creates the log directory on first use: this function is only called
    from the scraper's error path, so a missing directory previously made
    the logging itself raise FileNotFoundError and mask the real error.

    :param name: actress/table name used as the log file's base name
    :param info: pre-formatted text to append (caller adds newlines)
    """
    os.makedirs(LOG_PATH_SAVE, exist_ok=True)
    # 'a' suffices -- the original 'a+' also opened for reading, unused here
    with open(os.path.join(LOG_PATH_SAVE, name + '.txt'), 'a', encoding='utf-8') as f:
        f.write(info)


if __name__ == '__main__':
    def sql_data():
        """Print one flat tuple of every title stored across all
        actress tables in the MySQL database."""
        db = Data_Sql()
        # 'show tables' rows are 1-tuples -- flatten each to a plain name
        table_names = [''.join(row) for row in db.table_show()]
        all_titles = ()
        for table in table_names:
            rows = db.search(table, 'title')
            # each row is a 1-tuple containing the title string
            all_titles += tuple(''.join(row) for row in rows)

        print(all_titles)


    sql_data()


