import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import validators
import os
import time
import random

def GetUrl(channel: str):
    """Search tonkiang.us (GET) for *channel* and return valid stream URLs.

    Scrapes every ``<tba>`` tag inside ``<div class="resultplus">`` blocks
    of the search-result page and keeps only the strings that validate as
    URLs.

    :param channel: channel name to search for (e.g. ``"CCTV1"``)
    :return: list of stream URL strings (possibly empty)
    """
    url = 'http://tonkiang.us/?channel={}'.format(channel)
    print("url:", url)

    # Send a GET request to the URL and parse the HTML content.
    # A timeout is set so a stalled server cannot hang the script forever.
    response = requests.get(url, timeout=30)
    soup = BeautifulSoup(response.content, 'html.parser')

    liv_list = []
    for result in soup.find_all('div', class_='resultplus'):
        # Each candidate URL is the text content of a <tba> tag;
        # spaces are stripped because the page pads the text.
        for tag in result.find_all('tba'):
            url_live = str(tag.text).replace(" ", "")
            if validators.url(url_live):
                liv_list.append(url_live)
    return liv_list

def PostGetUrl(channel: str):
    """Search tonkiang.us (POST) for *channel* and return valid stream URLs.

    Posts the search form with browser-like headers, then scrapes every
    ``<tba>`` tag inside ``<div class="resultplus">`` blocks of the result
    page, keeping only strings that validate as URLs.

    :param channel: channel name to search for (e.g. ``"CCTV1"``)
    :return: list of stream URL strings (possibly empty), or ``None`` on a
        non-200 HTTP response
    """
    url = 'https://tonkiang.us/?'
    data = {
        # Field name is spelled "seerch" on purpose — that is what the
        # site's search form actually submits.
        'seerch': channel,
        'city': "a2210315f2.9240177382129",
    }
    # NOTE(review): the Cookie value below is a captured browser session and
    # will expire; refresh it if the site starts rejecting requests.
    # Content-Length is deliberately NOT set here: requests computes it from
    # the body (a hard-coded value is wrong for most channel names), and the
    # previously duplicated 'Referer' key has been collapsed to one entry.
    post_headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'Referer': 'https://tonkiang.us/?',
        'Origin': 'https://tonkiang.us',
        'Sec-Ch-Ua': '"Not A(Brand";v="99", "Google Chrome";v="121", "Chromium";v="121"',
        'Sec-Ch-Ua-Platform': 'Windows',
        'Content-Type': 'application/x-www-form-urlencoded',
        'Cookie': '_ga=GA1.1.356787779.1737890234; HstCfa4853344=1737890235551; __dtsu=4C30173789024006A89018D2D680B47F; c_ref_4853344=https%3A%2F%2Fwww.baidu.com%2Flink%3Furl%3DY9MWYfNA5knBiVh2yv44qTvpHPSeNpFyPhucSipvHT7%26wd%3D%26eqid%3Ddfa8341d001acbb40000000367987c97; FCNEC=%5B%5B%22AKsRol-zWim0w4AXar7gx7CgljhLpPDYRiv0xuvWWzSGFEFZhdWdbAgS5aEBLYXqXmz1ULFtzCi_pHXjN9sx7kPmIQYX-L4AZAe8xOEa8PRjyU_NgpqeUliFIzWe5bQNHmmFAlnzooft4o5de69N6oPOl4rkp1Ohuw%3D%3D%22%5D%5D; _ga_8KY4MGK2FJ=GS1.1.1738049401.1.1.1738052795.0.0.0; ip=39.77.171.74; isp=%E5%B1%B1%E4%B8%9C%E8%81%94%E9%80%9A; REFERER=Gameover; __gads=ID=57517bfc1fa3b072:T=1737890238:RT=1745566876:S=ALNI_Mb8t3G_XCw3XvsMKFOlM5bjydlJRA; __gpi=UID=0000100f3da06022:T=1737890238:RT=1745566876:S=ALNI_MY24sL9CvzY9-v3Lkup53LkTvgmsQ; __eoi=ID=0083e392e8230d9a:T=1737890238:RT=1745566876:S=AA-AfjaTIlTE2z25kBR_OmX1GLzk; HstCla4853344=1745566841380; HstCmu4853344=1745566841380; HstPn4853344=1; HstPt4853344=1; HstCnv4853344=1; HstCns4853344=1; cf_clearance=5aSnzBHXUxhJ11HuGDejoKyvX.G.90cZa9HGBzPVAFU-1745566878-1.2.1.1-Wh9U8wO4nDcb7RNQPochItolCiVXCVs8RFtC3.5NqAQS3syE8GufJXRTdiOk142wqyYAMDwEROWc0ZlnEg5hbbqy1C2loEPPMPKIfZxMVBV9qh6Y9qUpnJ_w27XCZiwHWs0h3mwmOUF_evxjtps1DvO8SpNq41EQRxET7PMWB8ym__U6ZPLMDfgQ5xP1jd3J6dxuu7UKQ9icRRduqBBft4HsIy9IklVkwTNiQjjHfcvTD57Ze8vXERXpUeH7H71TlM9DDGfUYmNo9wzS3NKFPP8qtmYfx5NlhgwDWDGbI3YXx6BvEnLH5aJRCos1kfLG.cTJk1Mo4C_JVtNk4Tz0xyydT9ly3KNuzB.Aqp9soEQ; _ga_JNMLRB3QLF=GS1.1.1745566838.4.1.1745566862.0.0.0',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36',
    }
    # Timeout prevents an unresponsive server from hanging the script.
    response = requests.post(url, data=data, headers=post_headers, timeout=30)

    print(response.text)
    print(response.status_code)
    if response.status_code != 200:
        return None

    soup = BeautifulSoup(response.text, 'html.parser')
    liv_list = []
    for result in soup.find_all('div', class_='resultplus'):
        # Each candidate URL is the text of a <tba> tag, padded with spaces.
        for tag in result.find_all('tba'):
            url_live = str(tag.text).replace(" ", "")
            if validators.url(url_live):
                liv_list.append(url_live)
    return liv_list
    


if __name__ == '__main__':
    # Rewrite live.txt in place: for every "name,url" data line, try to
    # replace the URL with freshly scraped ones from tonkiang.us. Blank
    # lines and "#genre#" group headers are preserved verbatim.
    current_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(current_dir, 'live.txt')
    with open(file_path, 'r', encoding='utf-8') as file:
        lines = file.readlines()

    mod_lines = []   # rewritten file content, in order
    exist_live = []  # URLs already assigned to some channel (avoid duplicates)
    lives = {}       # channel name -> scraped URL list (one lookup per name)
    for line in lines:
        # Keep separators and group headers untouched.
        if line.isspace() or "genre" in line:
            mod_lines.append(line)
            continue
        # A data line must contain "name,url"; pass malformed lines through
        # unchanged instead of crashing on the missing comma.
        if "," not in line:
            mod_lines.append(line)
            continue
        # maxsplit=1 keeps URLs that themselves contain commas intact.
        chnnl_name, chnnl_url = line.split(",", 1)
        chnnl_name = chnnl_name.replace(" ", "")
        chnnl_url = chnnl_url.rstrip('\n')
        if not chnnl_name or not chnnl_url:
            mod_lines.append(line)
            continue

        if chnnl_name in lives:
            # Channel seen before: reuse the cached scrape result.
            urls = lives[chnnl_name]
            if urls:
                update = False
                for candidate in urls:
                    if candidate not in exist_live:
                        update = True
                        mod_lines.append(chnnl_name + "," + candidate + "\n")
                        exist_live.append(candidate)
                if not update:
                    # Every scraped URL is already taken; keep the old one.
                    mod_lines.append(chnnl_name + "," + chnnl_url + "\n")
            else:
                # Scrape previously returned nothing; keep the old URL.
                mod_lines.append(chnnl_name + "," + chnnl_url + "\n")
        else:
            # First time we see this channel: query the site once and cache.
            url_l = PostGetUrl(chnnl_name)
            print("url size:", (len(url_l) if url_l else 0))
            print("list:", (chnnl_name, url_l))
            lives[chnnl_name] = url_l
            if url_l:
                mod_lines.append(chnnl_name + "," + url_l[0] + "\n")
                exist_live.append(url_l[0])
            else:
                # Scrape failed or empty; fall back to the existing URL.
                mod_lines.append(chnnl_name + "," + chnnl_url + "\n")

    with open(file_path, 'w', encoding='utf-8') as file:
        file.writelines(mod_lines)
