import sys
from bs4 import BeautifulSoup
import json
from util import py_dl
from util import py_socket


def parse_url(url, is_file=False):
    """Parse a download-detail page and print its metadata as one JSON line.

    Args:
        url: Either an HTTP(S) URL or a local file path (see ``is_file``).
        is_file: When True, ``url`` is treated as a path to a local HTML
            file; otherwise the page is fetched via ``py_dl.dl_url``.

    Side effects:
        Prints a JSON object with keys ``ref``, ``label``, ``detail``
        (list of {label, detail} download links) and ``addition``.
    """
    detail = {'ref': url}
    if is_file:
        # Read inside a context manager so the file handle is closed
        # (the old code leaked it by passing open() straight to bs4).
        with open(url, encoding='utf-8') as f:
            soup = BeautifulSoup(f.read(), 'lxml')
    else:
        html_doc = py_dl.dl_url(url)
        soup = BeautifulSoup(html_doc, 'lxml')
    detail['label'] = soup.find(class_='post-title').string

    # Work on a filtered copy instead of destructively .remove()-ing
    # newline text nodes from the live parse tree.
    downtips = [c for c in soup.find(id='down-tipid').contents if c != '\n']

    # Pages with more than 3 child nodes carry two download links
    # (the last two nodes); otherwise only the last node is a link.
    # NOTE(review): the >3 threshold mirrors the original page layout
    # assumption — confirm against representative pages.
    links = downtips[-2:] if len(downtips) > 3 else downtips[-1:]
    detail['detail'] = [
        {'label': tag.string, 'detail': tag.get('href')}
        for tag in links
    ]

    downfiles = [c for c in soup.find(class_='down-fileinfo').contents
                 if c != '\n']
    # Last node of the file-info box holds the extra description text.
    detail['addition'] = downfiles[-1].string

    # py_socket.send_msg(json.dumps(detail))
    print(json.dumps(detail))


if __name__ == '__main__':
    # Expect exactly one argument: an http(s) URL or a local file path.
    if len(sys.argv) < 2:
        print('缺少参数！')
    else:
        input_url = sys.argv[1]
        # Anything not starting with "http" is treated as a local file.
        parse_url(input_url, not input_url.startswith('http'))
