import requests
import sqlite3
import re
from bs4 import BeautifulSoup
import concurrent.futures
import time
import django
import send_email


def print_t(message):
    """Print *message* to stdout prefixed with the current local timestamp."""
    stamp = time.strftime("%Y-%m-%d %T")
    print(f"{stamp} {message}")


# Shared HTTP headers for every scraper request; a desktop-browser
# User-Agent — presumably to avoid being served a bot/blocked page
# by the search endpoints (TODO confirm).
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36"}


def get_github_result(keyword):
    """Scrape GitHub repository search results for *keyword*.

    Fetches up to the first 10 result pages and extracts repository
    links and their ``datetime`` attributes with regexes over the
    raw HTML.

    Args:
        keyword: Search term to query on GitHub.

    Returns:
        list[dict]: One dict per hit with keys ``keyword``,
        ``repository`` (full URL) and ``create_time`` (the first 10
        characters of the datetime attribute, i.e. YYYY-MM-DD).
    """
    results = []
    # Fetch at most the first 10 pages of results.
    for page in range(1, 11):
        url = f"https://github.com/search?p={page}&q={keyword}&type=Repositories"
        try:
            r = requests.get(url, headers=headers, timeout=60)
            repository_href = re.findall(
                '<a class="v-align-middle".*" href="(.*)">', r.text)
            repository_links = ["https://github.com" + href
                                for href in repository_href]
            # An empty page means we ran past the last page of results.
            if not repository_links:
                break
            # Renamed from `repository_time`: the original reused that
            # name as the zip loop variable, shadowing the list.
            repository_times = re.findall(
                'relative-time.*datetime="(.*)" class="no-wrap".*', r.text)

            for repo_url, repo_time in zip(repository_links, repository_times):
                results.append(
                    {'keyword': keyword, 'repository': repo_url,
                     'create_time': repo_time[0:10]})

            print_t(f"get github page{page} done.")
        except Exception as e:
            # Bug fix: `"..." + e` raised TypeError (str + Exception)
            # inside the handler; format the exception instead.
            print_t(f"get_github_result: {e}")
    return results


def get_gitee_result(keyword):
    """Scrape Gitee repository search results for *keyword*.

    Fetches up to the first 10 result pages from search.gitee.com and
    parses repository links plus their "updated at" labels out of the
    HTML with BeautifulSoup.

    Args:
        keyword: Search term to query on Gitee.

    Returns:
        list[dict]: One dict per hit with keys ``keyword``,
        ``repository`` (repository URL with the tracking query string
        stripped) and ``create_time`` (the update-time label text).
    """
    results = []
    host = 'https://gitee.com/'
    # Bug fix: the original mutated the shared module-level `headers`
    # dict, so the Referer leaked into every later request made by the
    # other scrapers. Use a per-call copy instead.
    request_headers = dict(headers, Referer=host)

    # Fetch at most the first 10 pages of results.
    for page in range(1, 11):
        url = f'https://search.gitee.com/?skin=rec&type=repository&q={keyword}&pageno={page}'
        try:
            r = requests.get(url, headers=request_headers, timeout=60)
            soup = BeautifulSoup(r.text, 'html.parser')
            hits = soup.find(id="hits-list")
            # No results container => past the last page.
            if hits is None:
                break

            anchors = hits.find_all('a', attrs={'class': 'ns', 'target': '_blank'})
            repository_urls = [a['href'].replace('?_from=gitee_search', '')
                               for a in anchors]
            if not repository_urls:
                break

            # The update-time text sits two siblings after the forks tag.
            forks_tags = hits.find_all('a', class_="tag forks theme-hover")
            repository_times = []
            for tag in forks_tags:
                label = tag.next_sibling.next_sibling.string
                label = label.replace("更新于", "").replace(" ", "")
                repository_times.append(label)

            for repo_url, repo_time in zip(repository_urls, repository_times):
                results.append(
                    {'keyword': keyword, 'repository': repo_url,
                     'create_time': repo_time})
            print_t(f"get gitee page{page} done.")
        except Exception as e:
            # Bug fix: `"..." + e` raised TypeError (str + Exception)
            # inside the handler; format the exception instead.
            print_t(f"get_gitee_result error: {e}")
    return results


def get_gitlab_result(keyword):
    """Scrape the first GitLab search results page for *keyword*.

    Currently disabled in ``check_repostory``; kept for completeness.

    Args:
        keyword: Search term to query on gitlab.com.

    Returns:
        list[dict]: One dict per hit with keys ``keyword``,
        ``repository`` (full URL) and ``create_time`` (the ``datetime``
        attribute of the matching <time> element, or '').
    """
    results = []
    url = f'https://gitlab.com/search?utf8=%E2%9C%93&snippets=&scope=&repository_ref=&search={keyword}'
    try:
        r = requests.get(url, headers=headers, timeout=60)
        soup = BeautifulSoup(r.text, 'html.parser')
        project_list = soup.find('ul', {'class': 'projects-list'})
        # No results container => nothing to collect.
        if project_list is None:
            return results
        projects = project_list.find_all('a', {'class': 'text-plain'})
        # Renamed from `time`: the original shadowed the `time` module.
        times = project_list.find_all('time', {'class': 'js-timeago'})
        # zip instead of indexing: the original `times[index]` could
        # IndexError when the two lists differ in length.
        for project, timestamp in zip(projects, times):
            results.append({'keyword': keyword,
                            'repository': "https://gitlab.com" + project.get("href"),
                            'create_time': timestamp.get('datetime', '')})
    except Exception as e:
        # Bug fix: the bare `except: pass` silently hid every network
        # and parse error; log the failure and still return best-effort.
        print_t(f"get_gitlab_result error: {e}")

    return results


def check_repostory(keywords):
    """Crawl GitHub and Gitee for every keyword and persist the hits.

    Runs the per-keyword scrapers on a 5-worker thread pool, logs every
    result, then inserts each hit into the local ``project_info.db``
    SQLite database (table ``Project``). Insert failures (typically
    duplicate rows) are skipped so the crawl is best-effort.

    Args:
        keywords: Iterable of search terms to monitor.
    """
    values = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        # Bug fix: map each future back to its keyword. The original
        # except clause referenced the loop variable `keyword`, which
        # always held the *last* submitted keyword, so failures were
        # attributed to the wrong search term.
        future_to_keyword = {}
        for keyword in keywords:
            future_to_keyword[executor.submit(get_github_result, keyword)] = keyword
            future_to_keyword[executor.submit(get_gitee_result, keyword)] = keyword
            # GitLab scraping is currently disabled:
            # future_to_keyword[executor.submit(get_gitlab_result, keyword)] = keyword

        for future in concurrent.futures.as_completed(future_to_keyword):
            try:
                data = future.result()
                for item in data:
                    print_t(item)
                    print_t(
                        f"keyword: {item['keyword']}, repository_link: {item['repository']}, update_time: {item['create_time']}")
            except Exception as exc:
                print_t('%r generated an exception: %s' %
                        (future_to_keyword[future], exc))
            else:
                values.extend(data)

    conn = sqlite3.connect('project_info.db')
    try:
        c = conn.cursor()
        for value in values:
            try:
                c.execute("INSERT INTO Project (repository_link,keyword,create_time,notify_times) VALUES (?,?,?,?)", [
                          value['repository'], value['keyword'], value['create_time'], 0])
            except sqlite3.Error:
                # Narrowed from a bare `except`: only ignore database
                # errors (typically IntegrityError on duplicate rows);
                # anything else should surface.
                pass
        conn.commit()
    finally:
        # Always release the connection, even if commit/insert raises.
        conn.close()
    print_t("本次监控已完成")


def main(keywords, freq, mail_receivers):
    """Run the monitor loop forever: crawl, notify, then wait.

    Args:
        keywords: Search terms passed to ``check_repostory``.
        freq: Minutes to wait between crawl rounds.
        mail_receivers: Addresses to notify; skipped when empty.
    """
    while True:
        check_repostory(keywords)
        if mail_receivers:
            send_email.send(mail_receivers)
        # Sleep `freq` minutes in one-minute slices.
        for _ in range(freq):
            time.sleep(60)
