from bs4 import BeautifulSoup
import requests
import urllib
import socket
import os
import threading
import time
import datetime

count = 1  # starting page index; the driver loop below rebinds it

# All scraped output (list<N>.md files) is written under ./dd.
# exist_ok=True avoids the racy exists()-then-makedirs() pattern and
# does not shadow the builtin `dir`.
os.makedirs('dd', exist_ok=True)
os.chdir('dd')


def sleep_time(hour, minute, sec):
    """Convert an (hour, minute, second) triple into a total second count."""
    seconds_per_minute = 60
    seconds_per_hour = 60 * seconds_per_minute
    total = hour * seconds_per_hour
    total += minute * seconds_per_minute
    total += sec
    return total


def _fetch_download_link(post_num):
    """Ask the site's ajax endpoint for the download link of one post.

    Tries the 'VIP-1' server first and falls back to 'VIP-1A'.  Returns
    the href of the first <a> tag in the response, or the string "none"
    when neither server yields a usable link.
    """
    for server in ('VIP-1', 'VIP-1A'):
        values = {'javinfo_download': '1',
                  'id': post_num,
                  'server': server,
                  }
        # timeout keeps a stalled server from hanging the worker thread forever
        moreR = requests.post(
            "http://javplay.com/wp-content/themes/javinfo/ajax.php",
            values, timeout=10)
        moreSoup = BeautifulSoup(moreR.text, 'html.parser')
        linka = moreSoup.find('a')
        if linka is not None:
            href = linka.get('href')
            if href is not None:
                return href
    return "none"


def download(count):
    """Scrape one listing page and append each entry's metadata to a file.

    Fetches http://javplay.com/page/<count>/, and for every <article
    class="entry"> on it resolves the download link via the ajax endpoint,
    then appends title / page URL / download link / cover image (as
    markdown) to list<count>.md in the current directory.

    count -- 1-based page number of the site's paginated archive.
    """
    url = "http://javplay.com/page/" + str(count) + "/"
    r = requests.get(url, timeout=10)
    soup = BeautifulSoup(r.text, 'html.parser')
    articles = soup.find_all('article', class_='entry')
    for article in articles:
        # Be polite: pause one second between per-entry ajax requests.
        time.sleep(sleep_time(0, 0, 1))
        post_id = article.get('id')
        # ids look like "post-12345"; the numeric part keys the ajax call
        post_num = post_id.split('-')[1]
        atag = article.find('h2').find('a')
        title = atag.get('title')
        href = atag.get('href')
        downloadLink = _fetch_download_link(post_num)
        print(href)
        print(downloadLink)
        print(title)
        img = article.find('img').get('src')
        print(img)
        file_name = "list"
        try:
            with open(file_name + str(count) + '.md', 'a') as file:
                file.write("title: " + title + "\n")
                file.write("url: " + href + "\n")
                file.write("download: " + downloadLink + "\n")
                file.write("![" + title + "](" + img + ")" + "\n\n")
        except OSError:
            # Best-effort: skip an entry whose record can't be written
            # rather than killing the whole page's worker thread.
            print("pass")

# now = time.strftime('%m-%d-%H-%M-%S', time.localtime(time.time()))
# print(now)
#download(3)
# now = time.strftime('%m-%d-%H-%M-%S', time.localtime(time.time()))
# print(now)
#
# Launch one scraper thread per archive page (pages 2..329), pacing the
# starts one second apart so page fetches are not fired simultaneously.
# NOTE(review): threads are never joined, so the script may exit while
# workers are still running — confirm that is acceptable.
for count in range(2, 330):
    worker = threading.Thread(target=download, args=[count])
    worker.start()
    time.sleep(sleep_time(0, 0, 1))
    print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!----' + str(count))
    # (the original `count += 1` was removed: a for-loop rebinds `count`
    # on every iteration, so incrementing it had no effect)



# os.chdir('dd')
# f=open('list.txt', 'r')
# socket.setdefaulttimeout(10)
# while True:
#     line = f.readline()
#     if line:
#         print(line)
#     r = requests.get(line)
#     html = r.text
#     soup = BeautifulSoup(html, 'html.parser')
#     title = soup.title.string
#     print(title)
#     imgdiv = soup.find('div',class_='tn-contentmt').find('img')
#     imgsrc= imgdiv.get('src')
#     print(imgsrc)
#     headers = {'DNT': '1', 'Upgrade-Insecure-Requests': '1',
#                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#                'Cache-Control': 'ax-age=0',
#                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_1) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14'}
#     data = None
#     req = urllib.request.Request(imgsrc, data)
#     try:
#         response = urllib.request.urlopen(req, timeout=10)
#         respHtml = response.read()
#         binfile = open(str(title) + ".jpg", "wb")
#         binfile.write(respHtml)
#         binfile.close()
#     except socket.timeout:
#         print("pass")

