#!/usr/bin/env python3.6
import re

from gevent import monkey;monkey.patch_socket()

import requests
from bs4 import BeautifulSoup
import gevent
from sqllite3_db import MovieDB
from magnetic_link import get_magnetic_link
from url_heade import read_Headers

# Shared HTTP request headers (loaded once from the project helper); used by
# every requests.get() call in this module.
head=read_Headers()


def get_data(moviedb,host, url, year, arear, type, name, ):
    """Follow one forum thread to its attachment page, extract the magnet
    link, and persist the movie record.

    Args:
        moviedb: open MovieDB handle; receives one inserted+committed row
            on success.
        host: site base URL, used to resolve the relative hrefs found on
            the thread and attachment pages.
        url: absolute URL of the forum thread page.
        year, arear, type, name: movie metadata stored alongside the links.
            (``type`` shadows the builtin; the name is kept for caller
            compatibility.)

    Any failure (network error, missing anchor, DB error) is swallowed so
    that one bad thread does not abort the whole gevent batch.
    """
    try:
        # Thread page -> first attachment link.
        # timeout keeps a dead server from hanging this greenlet forever.
        html1 = requests.get(url, headers=head, timeout=30)
        soup = BeautifulSoup(html1.text, "html5lib")
        b = soup.select(".attachlist td a ", )
        # Attachment page -> first download anchor, which yields the magnet.
        html2 = requests.get(host + b[0]['href'], headers=head, timeout=30)
        soup = BeautifulSoup(html2.text, "html5lib")
        b = soup.select("dd  a ", )
        magnetic_link = get_magnetic_link(host + b[0]['href'])
        moviedb.insert(url, host + b[0]['href'], magnetic_link, year, arear, type, name)
        moviedb.commit()
    except Exception:
        # Best-effort scrape: skip threads that fail to fetch or parse.
        # TODO(review): log the exception instead of dropping it silently.
        pass


def url_list(html):
    """Parse a forum listing fragment into candidate movie rows.

    Args:
        html: HTML fragment containing the listing table rows.

    Returns:
        List of 5-tuples ``(url, year, arear, type, name)``, one per row
        whose title contains "1080" (1080p releases only). The bracket
        decorations (``[...]``) around year/area/type are stripped; ``url``
        is made absolute by prefixing the module-level ``host``.
    """
    soup = BeautifulSoup(html, "html5lib")

    # Column layout of each row: 2nd link = year, 3rd = area,
    # 4th = category, 6th = title (its href is the thread URL).
    name_tags = soup.select("td > a:nth-of-type(6)")
    year_tags = soup.select("td > a:nth-of-type(2)")
    arear_tags = soup.select("td > a:nth-of-type(3)")
    type_tags = soup.select("td > a:nth-of-type(4)")

    url_link = [host + a["href"] for a in name_tags]
    name_list = [a.text for a in name_tags]
    year_list = [a.text.strip("][") for a in year_tags]
    arear_list = [a.text.strip("][") for a in arear_tags]
    type_list = [a.text.strip("][") for a in type_tags]

    # Keep only 1080p releases.
    return [
        item
        for item in zip(url_link, year_list, arear_list, type_list, name_list)
        if "1080" in item[4]
    ]


# Base URL of the forum being scraped; prefixed onto every relative href.
host = "http://www.btbtt.net/"


def movie_list(page_num):
    moviedb = MovieDB('moviedb.sqlite')
    g = []
    id = []
    if not page_num or page_num == 1:
        url = host + 'forum-index-fid-951-page-%s.htm' % (str(page_num))

        html = requests.get(url, headers=head)
        soup = BeautifulSoup(html.text, "html5lib")
        a = soup.select(".bg2")
        c = ""
        for i in a:
            for k in i.next_siblings:
                if str(k).strip().split():
                    c = c + str(k)
        items = url_list(c)

        for i in items:
            if moviedb.check_url(i[0]):
                item_id=moviedb.check_url(i[0])
                id.append(item_id)
            else:
                if "百度" in i[4]:
                    try:
                        name = re.split('[(\W)]{2}', i[4])[3]
                    except Exception as e:
                        name = i[4]
                else:
                    try:
                        name = re.split('[(\W)]{2}', i[4])[2]
                    except Exception as e:
                        name=i[4]
                g.append(gevent.spawn(get_data,moviedb, host, i[0], i[1], i[2], i[3], name))



    else:
        url = host + 'forum-index-fid-951-page-%s.htm' % (page_num)
        html = requests.get(url,headers=head)
        soup = BeautifulSoup(html.text, "html5lib")
        a = soup.select(".header")[0]
        c = ""
        for k in a.next_siblings:
            if str(k).strip().split():
                c = c + str(k)
        items = url_list(c)
        for i in items:
            if moviedb.check_url(i[0]):
                item_id = moviedb.check_url(i[0])
                id.append(item_id)
            else:
                if "百度" in i[4]:
                    try:
                        name = re.split('[(\W)]{2}', i[4])[3]
                    except Exception as e:
                        name = i[4]
                else:
                    try:
                        name = re.split('[(\W)]{2}', i[4])[2]
                    except Exception as e:
                        name=i[4]
                g.append(gevent.spawn(get_data,moviedb, host, i[0], i[1], i[2], i[3], name))
    gevent.joinall(g)
    moviedb.close()


if __name__ == '__main__':
    # Scrape the first listing page. movie_list persists its results via
    # the database and returns None, so there is nothing to capture.
    movie_list(1)