import  requests
import urllib.parse
from urllib.parse import quote, urlunparse

from missav.test_sql1 import cur,con
import requests,os,time

import platform
# Silence urllib3's InsecureRequestWarning globally (requests to this site
# are made without certificate verification).
requests.packages.urllib3.disable_warnings()
import ssl
import json
# Disable HTTPS certificate verification for all stdlib urllib-based clients.
# NOTE(review): this weakens TLS security for the entire process — confirm it
# is really needed for the target host.
ssl._create_default_https_context = ssl._create_unverified_context
import httplib2
# Force httplib2 onto HTTP/1.0 (no keep-alive / chunked transfer) —
# presumably a workaround for a server that misbehaves on HTTP/1.1; confirm.
httplib2.HTTPConnectionWithTimeout._http_vsn=10
httplib2.HTTPConnectionWithTimeout._http_vsn_str = 'HTTP/1.0'
import sys
print(sys.getrecursionlimit())
# Raise the recursion limit near its practical maximum; parsing deeply nested
# HTML with BeautifulSoup can otherwise raise RecursionError.
sys.setrecursionlimit(65530)

# NOTE(review): assigning this attribute does not configure retries for
# requests.get() — retries require mounting an HTTPAdapter(max_retries=...)
# on a Session. Verify whether this line is doing anything.
requests.DEFAULT_RETRIES = 100

def get_domain(url):
    """Return the network-location (host[:port]) component of *url*."""
    return urllib.parse.urlparse(url).netloc

def get_url(uri, netloc):
    """Build an absolute https:// URL for path *uri* on host *netloc*.

    The path is percent-encoded with ``quote`` so non-ASCII characters
    (e.g. Chinese titles in hrefs) survive in the final URL.

    Args:
        uri: path component, e.g. ``"/cat/15.html"``.
        netloc: host[:port], e.g. ``"xiaoyakankan.com"``.

    Returns:
        str: the assembled absolute URL.
    """
    # Fix: urlunparse is documented to take six *string* components; the
    # original passed None for params/query/fragment, which only worked by
    # accident of truthiness checks. Use empty strings, the documented form.
    return urlunparse(("https", netloc, quote(uri), "", "", ""))


from bs4 import BeautifulSoup


# Entry page for the category listing; `netloc` is reused below to turn
# the site's relative hrefs into absolute URLs.
url = "https://xiaoyakankan.com/cat/15.html"
netloc = get_domain(url)
def get_catalegory_list():
    """Scrape (name, url) pairs for every category on the category page.

    Returns:
        list[tuple[str, str]]: (category name, absolute url) tuples. The
        first and last anchors in the menu are skipped — per the sample
        markup, the first is an external banner link and the last is a
        trailing entry.
    """
    url = "https://xiaoyakankan.com/cat/15.html"
    # timeout so a stalled server cannot hang the scraper forever
    res = requests.get(url, timeout=30)

    # Explicit parser avoids bs4's "no parser specified" warning and keeps
    # parsing behavior stable across environments.
    soup = BeautifulSoup(res.text, "html.parser")

    anchors = soup.find("div", class_="gm-meta").find_all("a")
    data = []
    # Sample markup (why the [1:-1] slice):
    #   <a href="https://javtree.com/" target="_blank">...</a>   -> skipped
    #   <a class="on" href="/cat/15.html">...</a>                -> kept
    for a in anchors[1:-1]:
        name = a.get_text()
        data.append((name, get_url(a.get("href"), netloc)))
    return data


# data = get_catalegory_list()
# NOTE(review): this reads every row of movie_category and then immediately
# re-inserts the same rows into the same table. On a table with a unique
# constraint the insert raises and the error is merely printed — presumably
# a leftover from toggling between fresh scraping (line above) and resuming
# from rows already stored in the DB. Confirm which mode is intended.
res = cur.execute("select * from movie_category")
data = res.fetchall()


try:
    cur.executemany("INSERT INTO movie_category VALUES(?, ?)", data)
    con.commit()  # Remember to commit the transaction after executing INSERT
except Exception as e:
    print(e)

    # con.close()

# print(data)
# data = [{'name': '韩国情色片', 'url': 'https://xiaoyakankan.com/cat/1551.html'}]
# data = [('韩国情色片',  'https://xiaoyakankan.com/cat/1551.html')]

def get_movie(data):
    """Drain the work list *data* and store every movie found on each page.

    Each item is a tuple whose first element is the category name and whose
    last element is the page URL. Pages are fetched, parsed for
    ``<div class="gm-list">`` / ``<div class="item">`` entries, and each
    entry is inserted into the ``movie`` table as (category, title, url).

    Args:
        data: list of (name, ..., url) tuples; consumed (popped) in place.
    """
    # BUG FIX: the original created `sets = set()` INSIDE the while loop, so
    # the set was always empty and duplicate URLs were never skipped. The
    # seen-set must live outside the loop. Also check it BEFORE fetching so
    # duplicates don't cost an HTTP request.
    seen = set()
    while data:
        item = data.pop()
        page_url = item[-1]
        if page_url in seen:
            continue
        seen.add(page_url)

        name = item[0]
        # timeout so one dead page cannot hang the whole crawl
        res = requests.get(page_url, timeout=30)
        # explicit parser: stable results, no bs4 warning
        soup = BeautifulSoup(res.text, "html.parser")
        items = soup.find("div", class_="gm-list").find_all("div", class_="item")
        for entry in items:
            anchor = entry.find("a")
            url = get_url(anchor.get("href"), netloc)
            print(url)
            title = anchor.find("img").get("alt")
            print(title)
            try:
                cur.execute("INSERT INTO movie VALUES(?, ?,?)", (name, title, url))
            except Exception as e:
                # e.g. a uniqueness violation for an already-stored movie;
                # log and keep going
                print(e)
                continue

        con.commit()


def get_page():
    """Follow pagination links for every queued category page.

    Pops (name, ..., url) items from the module-level ``data`` list, fetches
    each page, and for every pager entry that is not the current page
    (marked with class ``"on"``) queues the page URL back onto ``data`` and
    records it in the ``movie_category`` table.
    """
    # BUG FIX: the original had its de-duplication commented out, while each
    # page queues its sibling pages back into `data` — so page A re-queues
    # page B and page B re-queues page A, and the loop never terminates.
    # Track visited URLs and skip them before fetching.
    visited = set()
    while data:
        item = data.pop()
        page_url = item[-1]
        if page_url in visited:
            continue
        visited.add(page_url)

        name = item[0]
        # timeout so one dead page cannot hang the whole crawl
        res = requests.get(page_url, timeout=30)
        print(page_url)
        # explicit parser: stable results, no bs4 warning
        soup = BeautifulSoup(res.text, "html.parser")

        # pagination block
        pagers = soup.find("div", class_="gm-page").find_all("li", class_="pager")
        for pager in pagers:
            # the current page's <li> carries class "on"; skip it
            if "on" in pager.get("class"):
                continue
            url = get_url(pager.find("a").get("href"), netloc)
            entry = (name, url)
            data.append(entry)
            try:
                cur.execute("INSERT INTO movie_category VALUES(?, ?)", entry)
            except Exception as e:
                # e.g. page already recorded; log and keep going
                print(e)
                continue

        con.commit()


# from threading import Thread
#
# t1 = Thread(target=get_movie(data))
# # t1 = Thread(target=get_page())
# t1.start()
# t1.join()
#
#
# con.close()









